diff --git a/keystone/api/_shared/EC2_S3_Resource.py b/keystone/api/_shared/EC2_S3_Resource.py index 7b2fc21b29..7bb79683e0 100644 --- a/keystone/api/_shared/EC2_S3_Resource.py +++ b/keystone/api/_shared/EC2_S3_Resource.py @@ -46,10 +46,11 @@ class ResourceBase(ks_flask.ResourceBase): def _check_timestamp(credentials): timestamp = ( # AWS Signature v1/v2 - credentials.get('params', {}).get('Timestamp') or + credentials.get('params', {}).get('Timestamp') + or # AWS Signature v4 - credentials.get('headers', {}).get('X-Amz-Date') or - credentials.get('params', {}).get('X-Amz-Date') + credentials.get('headers', {}).get('X-Amz-Date') + or credentials.get('params', {}).get('X-Amz-Date') ) if not timestamp: # If the signed payload doesn't include a timestamp then the signer @@ -60,12 +61,12 @@ class ResourceBase(ks_flask.ResourceBase): timestamp = timeutils.normalize_time(timestamp) except Exception as e: raise ks_exceptions.Unauthorized( - _('Credential timestamp is invalid: %s') % e) + _('Credential timestamp is invalid: %s') % e + ) auth_ttl = datetime.timedelta(minutes=CONF.credential.auth_ttl) current_time = timeutils.normalize_time(timeutils.utcnow()) if current_time > timestamp + auth_ttl: - raise ks_exceptions.Unauthorized( - _('Credential is expired')) + raise ks_exceptions.Unauthorized(_('Credential is expired')) def handle_authenticate(self): # TODO(morgan): convert this dirty check to JSON Schema validation @@ -86,9 +87,9 @@ class ResourceBase(ks_flask.ResourceBase): # Try "credentials" then "credential" and THEN ec2Credentials. 
Final # default is {} credentials = ( - self.request_body_json.get('credentials') or - self.request_body_json.get('credential') or - self.request_body_json.get('ec2Credentials') + self.request_body_json.get('credentials') + or self.request_body_json.get('credential') + or self.request_body_json.get('ec2Credentials') ) if not credentials: credentials = {} @@ -116,21 +117,24 @@ class ResourceBase(ks_flask.ResourceBase): secret=loaded.get('secret'), trust_id=loaded.get('trust_id'), app_cred_id=loaded.get('app_cred_id'), - access_token_id=loaded.get('access_token_id') + access_token_id=loaded.get('access_token_id'), ) # validate the signature self._check_signature(cred_data, credentials) project_ref = PROVIDERS.resource_api.get_project( - cred_data['project_id']) + cred_data['project_id'] + ) user_ref = PROVIDERS.identity_api.get_user(cred_data['user_id']) # validate that the auth info is valid and nothing is disabled try: PROVIDERS.identity_api.assert_user_enabled( - user_id=user_ref['id'], user=user_ref) + user_id=user_ref['id'], user=user_ref + ) PROVIDERS.resource_api.assert_project_enabled( - project_id=project_ref['id'], project=project_ref) + project_id=project_ref['id'], project=project_ref + ) except AssertionError as e: raise ks_exceptions.Unauthorized from e @@ -153,16 +157,19 @@ class ResourceBase(ks_flask.ResourceBase): elif cred_data['app_cred_id']: ac_client = PROVIDERS.application_credential_api app_cred = ac_client.get_application_credential( - cred_data['app_cred_id']) + cred_data['app_cred_id'] + ) roles = [r['id'] for r in app_cred['roles']] elif cred_data['access_token_id']: access_token = PROVIDERS.oauth_api.get_access_token( - cred_data['access_token_id']) + cred_data['access_token_id'] + ) roles = jsonutils.loads(access_token['role_ids']) auth_context = {'access_token_id': cred_data['access_token_id']} else: roles = PROVIDERS.assignment_api.get_roles_for_user_and_project( - user_ref['id'], project_ref['id']) + user_ref['id'], project_ref['id'] + 
) if not roles: raise ks_exceptions.Unauthorized(_('User not valid for project.')) @@ -178,9 +185,11 @@ class ResourceBase(ks_flask.ResourceBase): else: user_id = user_ref['id'] token = PROVIDERS.token_provider_api.issue_token( - user_id=user_id, method_names=method_names, + user_id=user_id, + method_names=method_names, project_id=project_ref['id'], trust_id=cred_data['trust_id'], app_cred_id=cred_data['app_cred_id'], - auth_context=auth_context) + auth_context=auth_context, + ) return token diff --git a/keystone/api/_shared/authentication.py b/keystone/api/_shared/authentication.py index 284b5b97c8..ac613d6552 100644 --- a/keystone/api/_shared/authentication.py +++ b/keystone/api/_shared/authentication.py @@ -33,9 +33,7 @@ PROVIDERS = provider_api.ProviderAPIs def _check_and_set_default_scoping(auth_info, auth_context): - (domain_id, project_id, trust, unscoped, system) = ( - auth_info.get_scope() - ) + (domain_id, project_id, trust, unscoped, system) = auth_info.get_scope() if trust: project_id = trust['project_id'] if system or domain_id or project_id or trust: @@ -65,37 +63,53 @@ def _check_and_set_default_scoping(auth_info, auth_context): # make sure user's default project is legit before scoping to it try: default_project_ref = PROVIDERS.resource_api.get_project( - default_project_id) + default_project_id + ) default_project_domain_ref = PROVIDERS.resource_api.get_domain( - default_project_ref['domain_id']) - if (default_project_ref.get('enabled', True) and - default_project_domain_ref.get('enabled', True)): + default_project_ref['domain_id'] + ) + if default_project_ref.get( + 'enabled', True + ) and default_project_domain_ref.get('enabled', True): if PROVIDERS.assignment_api.get_roles_for_user_and_project( - user_ref['id'], default_project_id): + user_ref['id'], default_project_id + ): auth_info.set_scope(project_id=default_project_id) else: - msg = ("User %(user_id)s doesn't have access to" - " default project %(project_id)s. 
The token" - " will be unscoped rather than scoped to the" - " project.") - LOG.debug(msg, - {'user_id': user_ref['id'], - 'project_id': default_project_id}) + msg = ( + "User %(user_id)s doesn't have access to" + " default project %(project_id)s. The token" + " will be unscoped rather than scoped to the" + " project." + ) + LOG.debug( + msg, + { + 'user_id': user_ref['id'], + 'project_id': default_project_id, + }, + ) else: - msg = ("User %(user_id)s's default project %(project_id)s" - " is disabled. The token will be unscoped rather" - " than scoped to the project.") - LOG.debug(msg, - {'user_id': user_ref['id'], - 'project_id': default_project_id}) + msg = ( + "User %(user_id)s's default project %(project_id)s" + " is disabled. The token will be unscoped rather" + " than scoped to the project." + ) + LOG.debug( + msg, + {'user_id': user_ref['id'], 'project_id': default_project_id}, + ) except (exception.ProjectNotFound, exception.DomainNotFound): # default project or default project domain doesn't exist, # will issue unscoped token instead - msg = ("User %(user_id)s's default project %(project_id)s not" - " found. The token will be unscoped rather than" - " scoped to the project.") - LOG.debug(msg, {'user_id': user_ref['id'], - 'project_id': default_project_id}) + msg = ( + "User %(user_id)s's default project %(project_id)s not" + " found. The token will be unscoped rather than" + " scoped to the project." + ) + LOG.debug( + msg, {'user_id': user_ref['id'], 'project_id': default_project_id} + ) def authenticate(auth_info, auth_context): @@ -112,9 +126,12 @@ def authenticate(auth_info, auth_context): '`authenticate` method is not of type ' '`keystone.auth.core.AuthContext`. For security ' 'purposes this is required. This is likely a programming ' - 'error. Received object of type `%s`', type(auth_context)) + 'error. 
Received object of type `%s`', + type(auth_context), + ) raise exception.Unauthorized( - _('Cannot Authenticate due to internal error.')) + _('Cannot Authenticate due to internal error.') + ) # The 'external' method allows any 'REMOTE_USER' based authentication # In some cases the server can set REMOTE_USER as '' instead of # dropping it, so this must be filtered out @@ -125,8 +142,9 @@ def authenticate(auth_info, auth_context): if resp and resp.status: # NOTE(notmorgan): ``external`` plugin cannot be multi-step # it is either a plain success/fail. - auth_context.setdefault( - 'method_names', []).insert(0, 'external') + auth_context.setdefault('method_names', []).insert( + 0, 'external' + ) # NOTE(notmorgan): All updates to auth_context is handled # here in the .authenticate method. auth_context.update(resp.response_data or {}) @@ -152,13 +170,13 @@ def authenticate(auth_info, auth_context): resp = method.authenticate(auth_info.get_method_data(method_name)) if resp: if resp.status: - auth_context.setdefault( - 'method_names', []).insert(0, method_name) + auth_context.setdefault('method_names', []).insert( + 0, method_name + ) # NOTE(notmorgan): All updates to auth_context is handled # here in the .authenticate method. 
If the auth attempt was # not successful do not update the auth_context - resp_method_names = resp.response_data.pop( - 'method_names', []) + resp_method_names = resp.response_data.pop('method_names', []) auth_context['method_names'].extend(resp_method_names) auth_context.update(resp.response_data or {}) elif resp.response_body: @@ -180,8 +198,7 @@ def authenticate_for_token(auth=None): """Authenticate user and issue a token.""" try: auth_info = core.AuthInfo.create(auth=auth) - auth_context = core.AuthContext(method_names=[], - bind={}) + auth_context = core.AuthContext(method_names=[], bind={}) authenticate(auth_info, auth_context) if auth_context.get('access_token_id'): auth_info.set_scope(None, auth_context['project_id'], None) @@ -201,7 +218,8 @@ def authenticate_for_token(auth=None): # the given receipt. if receipt: method_names_set = set( - auth_context.get('method_names', []) + receipt.methods) + auth_context.get('method_names', []) + receipt.methods + ) else: method_names_set = set(auth_context.get('method_names', [])) method_names = list(method_names_set) @@ -213,19 +231,27 @@ def authenticate_for_token(auth=None): # Do MFA Rule Validation for the user if not core.UserMFARulesValidator.check_auth_methods_against_rules( - auth_context['user_id'], method_names_set): + auth_context['user_id'], method_names_set + ): raise exception.InsufficientAuthMethods( - user_id=auth_context['user_id'], - methods=method_names) + user_id=auth_context['user_id'], methods=method_names + ) expires_at = auth_context.get('expires_at') token_audit_id = auth_context.get('audit_id') token = PROVIDERS.token_provider_api.issue_token( - auth_context['user_id'], method_names, expires_at=expires_at, - system=system, project_id=project_id, domain_id=domain_id, - auth_context=auth_context, trust_id=trust_id, - app_cred_id=app_cred_id, parent_audit_id=token_audit_id) + auth_context['user_id'], + method_names, + expires_at=expires_at, + system=system, + project_id=project_id, + 
domain_id=domain_id, + auth_context=auth_context, + trust_id=trust_id, + app_cred_id=app_cred_id, + parent_audit_id=token_audit_id, + ) # NOTE(wanghong): We consume a trust use only when we are using # trusts and have successfully issued a token. @@ -244,8 +270,8 @@ def federated_authenticate_for_token(identity_provider, protocol_id): 'methods': [protocol_id], protocol_id: { 'identity_provider': identity_provider, - 'protocol': protocol_id - } + 'protocol': protocol_id, + }, } } return authenticate_for_token(auth) diff --git a/keystone/api/_shared/implied_roles.py b/keystone/api/_shared/implied_roles.py index 3da6159203..9397d72785 100644 --- a/keystone/api/_shared/implied_roles.py +++ b/keystone/api/_shared/implied_roles.py @@ -25,20 +25,19 @@ PROVIDERS = provider_api.ProviderAPIs def build_prior_role_response_data(prior_role_id, prior_role_name): return { 'id': prior_role_id, - 'links': { - 'self': ks_flask.base_url(path='/roles/%s' % prior_role_id) - }, - 'name': prior_role_name} + 'links': {'self': ks_flask.base_url(path='/roles/%s' % prior_role_id)}, + 'name': prior_role_name, + } def build_implied_role_response_data(implied_role): return { 'id': implied_role['id'], 'links': { - 'self': ks_flask.base_url( - path='/roles/%s' % implied_role['id']) + 'self': ks_flask.base_url(path='/roles/%s' % implied_role['id']) }, - 'name': implied_role['name']} + 'name': implied_role['name'], + } def role_inference_response(prior_role_id): @@ -46,5 +45,8 @@ def role_inference_response(prior_role_id): response = { 'role_inference': { 'prior_role': build_prior_role_response_data( - prior_role_id, prior_role['name'])}} + prior_role_id, prior_role['name'] + ) + } + } return response diff --git a/keystone/api/_shared/json_home_relations.py b/keystone/api/_shared/json_home_relations.py index 997fcca527..bab90b8220 100644 --- a/keystone/api/_shared/json_home_relations.py +++ b/keystone/api/_shared/json_home_relations.py @@ -22,74 +22,108 @@ from keystone.common import json_home # 
OS-EC2 "extension" os_ec2_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-EC2', extension_version='1.0') + extension_name='OS-EC2', + extension_version='1.0', +) # s3token "extension" s3_token_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='s3tokens', extension_version='1.0') + extension_name='s3tokens', + extension_version='1.0', +) # OS-EP-FILTER "extension" os_ep_filter_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-EP-FILTER', extension_version='1.0') + extension_name='OS-EP-FILTER', + extension_version='1.0', +) os_ep_filter_parameter_rel_func = functools.partial( json_home.build_v3_extension_parameter_relation, - extension_name='OS-EP-FILTER', extension_version='1.0') + extension_name='OS-EP-FILTER', + extension_version='1.0', +) # OS-OAUTH1 "extension" os_oauth1_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-OAUTH1', extension_version='1.0') + extension_name='OS-OAUTH1', + extension_version='1.0', +) os_oauth1_parameter_rel_func = functools.partial( json_home.build_v3_extension_parameter_relation, - extension_name='OS-OAUTH1', extension_version='1.0') + extension_name='OS-OAUTH1', + extension_version='1.0', +) # OS-OAUTH2 "extension" os_oauth2_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-OAUTH2', extension_version='1.0') + extension_name='OS-OAUTH2', + extension_version='1.0', +) os_oauth2_parameter_rel_func = functools.partial( json_home.build_v3_extension_parameter_relation, - extension_name='OS-OAUTH2', extension_version='1.0') + extension_name='OS-OAUTH2', + extension_version='1.0', +) # OS-REVOKE "extension" os_revoke_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-REVOKE', extension_version='1.0') 
+ extension_name='OS-REVOKE', + extension_version='1.0', +) # OS-SIMPLE-CERT "extension" os_simple_cert_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-SIMPLE-CERT', extension_version='1.0') + extension_name='OS-SIMPLE-CERT', + extension_version='1.0', +) # OS-TRUST "extension" os_trust_resource_rel_func = functools.partial( - json_home.build_v3_extension_resource_relation, extension_name='OS-TRUST', - extension_version='1.0') + json_home.build_v3_extension_resource_relation, + extension_name='OS-TRUST', + extension_version='1.0', +) os_trust_parameter_rel_func = functools.partial( - json_home.build_v3_extension_parameter_relation, extension_name='OS-TRUST', - extension_version='1.0') + json_home.build_v3_extension_parameter_relation, + extension_name='OS-TRUST', + extension_version='1.0', +) # OS-ENDPOINT-POLICY "extension" os_endpoint_policy_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-ENDPOINT-POLICY', extension_version='1.0') + extension_name='OS-ENDPOINT-POLICY', + extension_version='1.0', +) # OS-FEDERATION "extension" os_federation_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-FEDERATION', extension_version='1.0') + extension_name='OS-FEDERATION', + extension_version='1.0', +) os_federation_parameter_rel_func = functools.partial( json_home.build_v3_extension_parameter_relation, - extension_name='OS-FEDERATION', extension_version='1.0') + extension_name='OS-FEDERATION', + extension_version='1.0', +) # OS-INHERIT "extension" os_inherit_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='OS-INHERIT', extension_version='1.0') + extension_name='OS-INHERIT', + extension_version='1.0', +) # OS-PKI (revoked) "extension" os_pki_resource_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - 
extension_name='OS-PKI', extension_version='1.0') + extension_name='OS-PKI', + extension_version='1.0', +) diff --git a/keystone/api/_shared/saml.py b/keystone/api/_shared/saml.py index 956acb8c9b..a9db40873c 100644 --- a/keystone/api/_shared/saml.py +++ b/keystone/api/_shared/saml.py @@ -35,8 +35,10 @@ def create_base_saml_assertion(auth): token = PROVIDERS.token_provider_api.validate_token(token_id) if not token.project_scoped: - action = _('Use a project scoped token when attempting to create ' - 'a SAML assertion') + action = _( + 'Use a project scoped token when attempting to create ' + 'a SAML assertion' + ) raise exception.ForbiddenAction(action=action) subject = token.user['name'] @@ -58,19 +60,27 @@ def create_base_saml_assertion(auth): 'JSON:{"name":"group2","domain":{"name":"Default"}}'] """ user_groups = [] - groups = PROVIDERS.identity_api.list_groups_for_user( - token.user_id) + groups = PROVIDERS.identity_api.list_groups_for_user(token.user_id) for group in groups: user_group = {} group_domain_name = PROVIDERS.resource_api.get_domain( - group['domain_id'])['name'] + group['domain_id'] + )['name'] user_group["name"] = group['name'] user_group["domain"] = {'name': group_domain_name} user_groups.append('JSON:' + jsonutils.dumps(user_group)) return user_groups + groups = group_membership() generator = keystone_idp.SAMLGenerator() response = generator.samlize_token( - issuer, sp_url, subject, subject_domain_name, - role_names, project, project_domain_name, groups) + issuer, + sp_url, + subject, + subject_domain_name, + role_names, + project, + project_domain_name, + groups, + ) return response, service_provider diff --git a/keystone/api/auth.py b/keystone/api/auth.py index b4b8300480..3224848032 100644 --- a/keystone/api/auth.py +++ b/keystone/api/auth.py @@ -58,9 +58,11 @@ def _combine_lists_uniquely(a, b): def _build_response_headers(service_provider): # URLs in header are encoded into bytes - return [('Content-Type', 'text/xml'), - ('X-sp-url', 
service_provider['sp_url'].encode('utf-8')), - ('X-auth-url', service_provider['auth_url'].encode('utf-8'))] + return [ + ('Content-Type', 'text/xml'), + ('X-sp-url', service_provider['sp_url'].encode('utf-8')), + ('X-auth-url', service_provider['auth_url'].encode('utf-8')), + ] def _get_sso_origin_host(): @@ -87,13 +89,14 @@ def _get_sso_origin_host(): host = urllib.parse.unquote_plus(origin) # change trusted_dashboard hostnames to lowercase before comparison - trusted_dashboards = [k_utils.lower_case_hostname(trusted) - for trusted in CONF.federation.trusted_dashboard] + trusted_dashboards = [ + k_utils.lower_case_hostname(trusted) + for trusted in CONF.federation.trusted_dashboard + ] if host not in trusted_dashboards: msg = '%(host)s is not a trusted dashboard host' % {'host': host} - tr_msg = _('%(host)s is not a trusted dashboard host') % { - 'host': host} + tr_msg = _('%(host)s is not a trusted dashboard host') % {'host': host} LOG.error(msg) raise exception.Unauthorized(tr_msg) @@ -133,14 +136,16 @@ class AuthProjectsResource(ks_flask.ResourceBase): if user_id: try: user_p_refs = PROVIDERS.assignment_api.list_projects_for_user( - user_id) + user_id + ) except exception.UserNotFound: # nosec # federated users have an id but they don't link to anything pass if group_ids: grp_p_refs = PROVIDERS.assignment_api.list_projects_for_groups( - group_ids) + group_ids + ) refs = _combine_lists_uniquely(user_p_refs, grp_p_refs) return self.wrap_collection(refs) @@ -165,14 +170,16 @@ class AuthDomainsResource(ks_flask.ResourceBase): if user_id: try: user_d_refs = PROVIDERS.assignment_api.list_domains_for_user( - user_id) + user_id + ) except exception.UserNotFound: # nosec # federated users have an id but they don't link to anything pass if group_ids: grp_d_refs = PROVIDERS.assignment_api.list_domains_for_groups( - group_ids) + group_ids + ) refs = _combine_lists_uniquely(user_d_refs, grp_d_refs) return self.wrap_collection(refs) @@ -195,7 +202,8 @@ class 
AuthSystemResource(_AuthFederationWebSSOBase): try: user_assignments = ( PROVIDERS.assignment_api.list_system_grants_for_user( - user_id) + user_id + ) ) except exception.UserNotFound: # nosec # federated users have an id but they don't link to anything @@ -204,25 +212,23 @@ class AuthSystemResource(_AuthFederationWebSSOBase): if group_ids: group_assignments = ( PROVIDERS.assignment_api.list_system_grants_for_groups( - group_ids) + group_ids + ) ) assignments = _combine_lists_uniquely( - user_assignments, group_assignments) + user_assignments, group_assignments + ) if assignments: response = { 'system': [{'all': True}], - 'links': { - 'self': ks_flask.base_url(path='auth/system') - } + 'links': {'self': ks_flask.base_url(path='auth/system')}, } else: response = { 'system': [], - 'links': { - 'self': ks_flask.base_url(path='auth/system') - } + 'links': {'self': ks_flask.base_url(path='auth/system')}, } return response @@ -239,16 +245,17 @@ class AuthCatalogResource(_AuthFederationWebSSOBase): if not project_id: raise exception.Forbidden( - _('A project-scoped token is required to produce a ' - 'service catalog.')) + _( + 'A project-scoped token is required to produce a ' + 'service catalog.' 
+ ) + ) return { 'catalog': PROVIDERS.catalog_api.get_v3_catalog( user_id, project_id ), - 'links': { - 'self': ks_flask.base_url(path='auth/catalog') - } + 'links': {'self': ks_flask.base_url(path='auth/catalog')}, } @@ -285,18 +292,24 @@ class AuthTokenResource(_AuthFederationWebSSOBase): ENFORCER.enforce_call(action='identity:validate_token') token_id = flask.request.headers.get( - authorization.SUBJECT_TOKEN_HEADER) + authorization.SUBJECT_TOKEN_HEADER + ) access_rules_support = flask.request.headers.get( - authorization.ACCESS_RULES_HEADER) + authorization.ACCESS_RULES_HEADER + ) allow_expired = strutils.bool_from_string( - flask.request.args.get('allow_expired')) + flask.request.args.get('allow_expired') + ) window_secs = CONF.token.allow_expired_window if allow_expired else 0 include_catalog = 'nocatalog' not in flask.request.args token = PROVIDERS.token_provider_api.validate_token( - token_id, window_seconds=window_secs, - access_rules_support=access_rules_support) + token_id, + window_seconds=window_secs, + access_rules_support=access_rules_support, + ) token_resp = render_token.render_token_response_from_model( - token, include_catalog=include_catalog) + token, include_catalog=include_catalog + ) resp_body = jsonutils.dumps(token_resp) response = flask.make_response(resp_body, http.client.OK) response.headers['X-Subject-Token'] = token_id @@ -329,7 +342,8 @@ class AuthTokenResource(_AuthFederationWebSSOBase): """ ENFORCER.enforce_call(action='identity:revoke_token') token_id = flask.request.headers.get( - authorization.SUBJECT_TOKEN_HEADER) + authorization.SUBJECT_TOKEN_HEADER + ) PROVIDERS.token_provider_api.revoke_token(token_id) return None, http.client.NO_CONTENT @@ -342,7 +356,8 @@ class AuthFederationWebSSOResource(_AuthFederationWebSSOBase): for idp in idps: try: remote_id_name = federation_utils.get_remote_id_parameter( - idp, protocol_id) + idp, protocol_id + ) except exception.FederatedProtocolNotFound: # no protocol for this IdP, so this can't 
be the IdP we're # looking for @@ -360,7 +375,8 @@ class AuthFederationWebSSOResource(_AuthFederationWebSSOBase): ref = PROVIDERS.federation_api.get_idp_from_remote_id(remote_id) identity_provider = ref['idp_id'] token = authentication.federated_authenticate_for_token( - identity_provider=identity_provider, protocol_id=protocol_id) + identity_provider=identity_provider, protocol_id=protocol_id + ) return cls._render_template_response(host, token.id) @ks_flask.unenforced_api @@ -378,7 +394,8 @@ class AuthFederationWebSSOIDPsResource(_AuthFederationWebSSOBase): host = _get_sso_origin_host() token = authentication.federated_authenticate_for_token( - identity_provider=idp_id, protocol_id=protocol_id) + identity_provider=idp_id, protocol_id=protocol_id + ) return cls._render_template_response(host, token.id) @ks_flask.unenforced_api @@ -423,15 +440,18 @@ class AuthFederationSaml2ECPResource(_AuthFederationWebSSOBase): auth = self.request_body_json.get('auth') validation.lazy_validate(federation_schema.saml_create, auth) saml_assertion, service_provider = saml.create_base_saml_assertion( - auth) + auth + ) relay_state_prefix = service_provider['relay_state_prefix'] generator = keystone_idp.ECPGenerator() ecp_assertion = generator.generate_ecp( - saml_assertion, relay_state_prefix) + saml_assertion, relay_state_prefix + ) headers = _build_response_headers(service_provider) response = flask.make_response( - ecp_assertion.to_string(), http.client.OK) + ecp_assertion.to_string(), http.client.OK + ) for header, value in headers: response.headers[header] = value return response @@ -445,29 +465,34 @@ class AuthAPI(ks_flask.APIBase): ks_flask.construct_resource_map( resource=AuthProjectsResource, url='/auth/projects', - alternate_urls=[dict( - url='/OS-FEDERATION/projects', - json_home=ks_flask.construct_json_home_data( - rel='projects', - resource_relation_func=( - json_home_relations.os_federation_resource_rel_func) + alternate_urls=[ + dict( + url='/OS-FEDERATION/projects', + 
json_home=ks_flask.construct_json_home_data( + rel='projects', + resource_relation_func=( + json_home_relations.os_federation_resource_rel_func + ), + ), ) - )], - + ], rel='auth_projects', - resource_kwargs={} + resource_kwargs={}, ), ks_flask.construct_resource_map( resource=AuthDomainsResource, url='/auth/domains', - alternate_urls=[dict( - url='/OS-FEDERATION/domains', - json_home=ks_flask.construct_json_home_data( - rel='domains', - resource_relation_func=( - json_home_relations.os_federation_resource_rel_func) + alternate_urls=[ + dict( + url='/OS-FEDERATION/domains', + json_home=ks_flask.construct_json_home_data( + rel='domains', + resource_relation_func=( + json_home_relations.os_federation_resource_rel_func + ), + ), ) - )], + ], rel='auth_domains', resource_kwargs={}, ), @@ -475,27 +500,27 @@ class AuthAPI(ks_flask.APIBase): resource=AuthSystemResource, url='/auth/system', resource_kwargs={}, - rel='auth_system' + rel='auth_system', ), ks_flask.construct_resource_map( resource=AuthCatalogResource, url='/auth/catalog', resource_kwargs={}, - rel='auth_catalog' + rel='auth_catalog', ), ks_flask.construct_resource_map( resource=AuthTokenOSPKIResource, url='/auth/tokens/OS-PKI/revoked', resource_kwargs={}, rel='revocations', - resource_relation_func=json_home_relations.os_pki_resource_rel_func + resource_relation_func=json_home_relations.os_pki_resource_rel_func, ), ks_flask.construct_resource_map( resource=AuthTokenResource, url='/auth/tokens', resource_kwargs={}, - rel='auth_tokens' - ) + rel='auth_tokens', + ), ] @@ -509,16 +534,18 @@ class AuthFederationAPI(ks_flask.APIBase): url='/auth/OS-FEDERATION/saml2', resource_kwargs={}, resource_relation_func=( - json_home_relations.os_federation_resource_rel_func), - rel='saml2' + json_home_relations.os_federation_resource_rel_func + ), + rel='saml2', ), ks_flask.construct_resource_map( resource=AuthFederationSaml2ECPResource, url='/auth/OS-FEDERATION/saml2/ecp', resource_kwargs={}, resource_relation_func=( - 
json_home_relations.os_federation_resource_rel_func), - rel='ecp' + json_home_relations.os_federation_resource_rel_func + ), + rel='ecp', ), ks_flask.construct_resource_map( resource=AuthFederationWebSSOResource, @@ -526,28 +553,40 @@ class AuthFederationAPI(ks_flask.APIBase): resource_kwargs={}, rel='websso', resource_relation_func=( - json_home_relations.os_federation_resource_rel_func), + json_home_relations.os_federation_resource_rel_func + ), path_vars={ 'protocol_id': ( json_home_relations.os_federation_parameter_rel_func( - parameter_name='protocol_id'))} + parameter_name='protocol_id' + ) + ) + }, ), ks_flask.construct_resource_map( resource=AuthFederationWebSSOIDPsResource, - url=('/auth/OS-FEDERATION/identity_providers//' - 'protocols//websso'), + url=( + '/auth/OS-FEDERATION/identity_providers//' + 'protocols//websso' + ), resource_kwargs={}, rel='identity_providers_websso', resource_relation_func=( - json_home_relations.os_federation_resource_rel_func), + json_home_relations.os_federation_resource_rel_func + ), path_vars={ 'idp_id': ( json_home_relations.os_federation_parameter_rel_func( - parameter_name='idp_id')), + parameter_name='idp_id' + ) + ), 'protocol_id': ( json_home_relations.os_federation_parameter_rel_func( - parameter_name='protocol_id'))} - ) + parameter_name='protocol_id' + ) + ), + }, + ), ] diff --git a/keystone/api/credentials.py b/keystone/api/credentials.py index 90b53dd686..9c9f343b32 100644 --- a/keystone/api/credentials.py +++ b/keystone/api/credentials.py @@ -65,23 +65,28 @@ class CredentialResource(ks_flask.ResourceBase): blob = jsonutils.loads(ref.get('blob')) except (ValueError, TabError): raise exception.ValidationError( - message=_('Invalid blob in credential')) + message=_('Invalid blob in credential') + ) if not blob or not isinstance(blob, dict): - raise exception.ValidationError(attribute='blob', - target='credential') + raise exception.ValidationError( + attribute='blob', target='credential' + ) if blob.get('access') 
is None: - raise exception.ValidationError(attribute='access', - target='credential') + raise exception.ValidationError( + attribute='access', target='credential' + ) return blob def _assign_unique_id( - self, ref, trust_id=None, app_cred_id=None, access_token_id=None): + self, ref, trust_id=None, app_cred_id=None, access_token_id=None + ): # Generates an assigns a unique identifier to a credential reference. if ref.get('type', '').lower() == 'ec2': blob = self._validate_blob_json(ref) ref = ref.copy() ref['id'] = hashlib.sha256( - blob['access'].encode('utf8')).hexdigest() + blob['access'].encode('utf8') + ).hexdigest() # update the blob with the trust_id or app_cred_id, so credentials # created with a trust- or app cred-scoped token will result in # trust- or app cred-scoped tokens when authentication via @@ -105,8 +110,11 @@ class CredentialResource(ks_flask.ResourceBase): target = {'credential': {'user_id': self.oslo_context.user_id}} else: target = None - ENFORCER.enforce_call(action='identity:list_credentials', - filters=filters, target_attr=target) + ENFORCER.enforce_call( + action='identity:list_credentials', + filters=filters, + target_attr=target, + ) hints = self.build_driver_hints(filters) refs = PROVIDERS.credential_api.list_credentials(hints) # If the request was filtered, make sure to return only the @@ -122,7 +130,7 @@ class CredentialResource(ks_flask.ResourceBase): cred = PROVIDERS.credential_api.get_credential(ref['id']) ENFORCER.enforce_call( action='identity:get_credential', - target_attr={'credential': cred} + target_attr={'credential': cred}, ) filtered_refs.append(ref) except exception.Forbidden: @@ -134,7 +142,7 @@ class CredentialResource(ks_flask.ResourceBase): def _get_credential(self, credential_id): ENFORCER.enforce_call( action='identity:get_credential', - build_target=_build_target_enforcement + build_target=_build_target_enforcement, ) credential = PROVIDERS.credential_api.get_credential(credential_id) return 
self.wrap_member(self._blob_to_json(credential)) @@ -158,15 +166,20 @@ class CredentialResource(ks_flask.ResourceBase): validation.lazy_validate(schema.credential_create, credential) trust_id = getattr(self.oslo_context, 'trust_id', None) app_cred_id = getattr( - self.auth_context['token'], 'application_credential_id', None) + self.auth_context['token'], 'application_credential_id', None + ) access_token_id = getattr( - self.auth_context['token'], 'access_token_id', None) + self.auth_context['token'], 'access_token_id', None + ) ref = self._assign_unique_id( self._normalize_dict(credential), - trust_id=trust_id, app_cred_id=app_cred_id, - access_token_id=access_token_id) + trust_id=trust_id, + app_cred_id=app_cred_id, + access_token_id=access_token_id, + ) ref = PROVIDERS.credential_api.create_credential( - ref['id'], ref, initiator=self.audit_initiator) + ref['id'], ref, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def _validate_blob_update_keys(self, credential, ref): @@ -176,8 +189,12 @@ class CredentialResource(ks_flask.ResourceBase): if isinstance(old_blob, str): old_blob = jsonutils.loads(old_blob) # if there was a scope set, prevent changing it or unsetting it - for key in ['trust_id', 'app_cred_id', 'access_token_id', - 'access_id']: + for key in [ + 'trust_id', + 'app_cred_id', + 'access_token_id', + 'access_id', + ]: if old_blob.get(key) != new_blob.get(key): message = _('%s can not be updated for credential') % key raise exception.ValidationError(message=message) @@ -186,7 +203,7 @@ class CredentialResource(ks_flask.ResourceBase): # Update Credential ENFORCER.enforce_call( action='identity:update_credential', - build_target=_build_target_enforcement + build_target=_build_target_enforcement, ) current = PROVIDERS.credential_api.get_credential(credential_id) @@ -200,19 +217,23 @@ class CredentialResource(ks_flask.ResourceBase): action='identity:update_credential', target_attr=target ) ref = 
PROVIDERS.credential_api.update_credential( - credential_id, credential) + credential_id, credential + ) return self.wrap_member(ref) def delete(self, credential_id): # Delete credentials ENFORCER.enforce_call( action='identity:delete_credential', - build_target=_build_target_enforcement + build_target=_build_target_enforcement, ) - return (PROVIDERS.credential_api.delete_credential( - credential_id, initiator=self.audit_initiator), - http.client.NO_CONTENT) + return ( + PROVIDERS.credential_api.delete_credential( + credential_id, initiator=self.audit_initiator + ), + http.client.NO_CONTENT, + ) class CredentialAPI(ks_flask.APIBase): diff --git a/keystone/api/discovery.py b/keystone/api/discovery.py index 7483597b81..3829b54650 100644 --- a/keystone/api/discovery.py +++ b/keystone/api/discovery.py @@ -31,14 +31,15 @@ def _get_versions_list(identity_url): 'id': 'v3.14', 'status': 'stable', 'updated': '2020-04-07T00:00:00Z', - 'links': [{ - 'rel': 'self', - 'href': identity_url, - }], - 'media-types': [{ - 'base': 'application/json', - 'type': MEDIA_TYPE_JSON % 'v3' - }] + 'links': [ + { + 'rel': 'self', + 'href': identity_url, + } + ], + 'media-types': [ + {'base': 'application/json', 'type': MEDIA_TYPE_JSON % 'v3'} + ], } return versions @@ -53,7 +54,8 @@ def v3_mime_type_best_match(): return MimeTypes.JSON return request.accept_mimetypes.best_match( - [MimeTypes.JSON, MimeTypes.JSON_HOME]) + [MimeTypes.JSON, MimeTypes.JSON_HOME] + ) @_DISCOVERY_BLUEPRINT.route('/') @@ -63,8 +65,10 @@ def get_versions(): # understand the JSON-Home document. 
v3_json_home = json_home.JsonHomeResources.resources() json_home.translate_urls(v3_json_home, '/v3') - return flask.Response(response=jsonutils.dumps(v3_json_home), - mimetype=MimeTypes.JSON_HOME) + return flask.Response( + response=jsonutils.dumps(v3_json_home), + mimetype=MimeTypes.JSON_HOME, + ) else: identity_url = '%s/' % ks_flask.base_url() versions = _get_versions_list(identity_url) @@ -76,10 +80,11 @@ def get_versions(): response = flask.Response( response=jsonutils.dumps( - {'versions': { - 'values': list(versions.values())}}), + {'versions': {'values': list(versions.values())}} + ), mimetype=MimeTypes.JSON, - status=http.client.MULTIPLE_CHOICES) + status=http.client.MULTIPLE_CHOICES, + ) response.headers['Location'] = preferred_location return response @@ -90,14 +95,16 @@ def get_version_v3(): # RENDER JSON-Home form, we have a clever client who will # understand the JSON-Home document. content = json_home.JsonHomeResources.resources() - return flask.Response(response=jsonutils.dumps(content), - mimetype=MimeTypes.JSON_HOME) + return flask.Response( + response=jsonutils.dumps(content), mimetype=MimeTypes.JSON_HOME + ) else: identity_url = '%s/' % ks_flask.base_url() versions = _get_versions_list(identity_url) return flask.Response( response=jsonutils.dumps({'version': versions['v3']}), - mimetype=MimeTypes.JSON) + mimetype=MimeTypes.JSON, + ) class DiscoveryAPI(object): diff --git a/keystone/api/domains.py b/keystone/api/domains.py index 0f8477bca7..566676ac30 100644 --- a/keystone/api/domains.py +++ b/keystone/api/domains.py @@ -59,14 +59,16 @@ def _build_enforcement_target(allow_non_existing=False): if flask.request.view_args.get('user_id'): try: target['user'] = PROVIDERS.identity_api.get_user( - flask.request.view_args['user_id']) + flask.request.view_args['user_id'] + ) except exception.UserNotFound: if not allow_non_existing: raise else: try: target['group'] = PROVIDERS.identity_api.get_group( - flask.request.view_args.get('group_id')) + 
flask.request.view_args.get('group_id') + ) except exception.GroupNotFound: if not allow_non_existing: raise @@ -77,7 +79,8 @@ class DomainResource(ks_flask.ResourceBase): collection_key = 'domains' member_key = 'domain' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='resource_api', method='get_domain') + api='resource_api', method='get_domain' + ) def get(self, domain_id=None): """Get domain or list domains. @@ -92,7 +95,7 @@ class DomainResource(ks_flask.ResourceBase): def _get_domain(self, domain_id): ENFORCER.enforce_call( action='identity:get_domain', - build_target=_build_domain_enforcement_target + build_target=_build_domain_enforcement_target, ) domain = PROVIDERS.resource_api.get_domain(domain_id) return self.wrap_member(domain) @@ -102,16 +105,14 @@ class DomainResource(ks_flask.ResourceBase): target = None if self.oslo_context.domain_id: target = {'domain': {'id': self.oslo_context.domain_id}} - ENFORCER.enforce_call(action='identity:list_domains', - filters=filters, - target_attr=target) + ENFORCER.enforce_call( + action='identity:list_domains', filters=filters, target_attr=target + ) hints = self.build_driver_hints(filters) refs = PROVIDERS.resource_api.list_domains(hints=hints) if self.oslo_context.domain_id: domain_id = self.oslo_context.domain_id - filtered_refs = [ - ref for ref in refs if ref['id'] == domain_id - ] + filtered_refs = [ref for ref in refs if ref['id'] == domain_id] else: filtered_refs = refs return self.wrap_collection(filtered_refs, hints=hints) @@ -137,7 +138,8 @@ class DomainResource(ks_flask.ResourceBase): domain['id'] = domain_id domain = self._normalize_dict(domain) ref = PROVIDERS.resource_api.create_domain( - domain['id'], domain, initiator=self.audit_initiator) + domain['id'], domain, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def patch(self, domain_id): @@ -150,7 +152,8 @@ class DomainResource(ks_flask.ResourceBase): 
validation.lazy_validate(schema.domain_update, domain) PROVIDERS.resource_api.get_domain(domain_id) ref = PROVIDERS.resource_api.update_domain( - domain_id, domain, initiator=self.audit_initiator) + domain_id, domain, initiator=self.audit_initiator + ) return self.wrap_member(ref) def delete(self, domain_id): @@ -160,7 +163,8 @@ class DomainResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call(action='identity:delete_domain') PROVIDERS.resource_api.delete_domain( - domain_id, initiator=self.audit_initiator) + domain_id, initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT @@ -178,14 +182,15 @@ class DomainConfigBase(ks_flask.ResourceBase): config = {} try: PROVIDERS.resource_api.get_domain(domain_id) - except Exception as e: # nosec + except Exception as e: # nosec # We don't raise out here, we raise out after enforcement, this # ensures we do not leak domain existance. err = e finally: if group and group == 'security_compliance': config = self._get_security_compliance_config( - domain_id, group, option) + domain_id, group, option + ) else: config = self._get_config(domain_id, group, option) if err is not None: @@ -195,13 +200,16 @@ class DomainConfigBase(ks_flask.ResourceBase): def _get_config(self, domain_id, group, option): ENFORCER.enforce_call(action='identity:get_domain_config') return PROVIDERS.domain_config_api.get_config( - domain_id, group=group, option=option) + domain_id, group=group, option=option + ) def _get_security_compliance_config(self, domain_id, group, option): ENFORCER.enforce_call( - action='identity:get_security_compliance_domain_config') + action='identity:get_security_compliance_domain_config' + ) return PROVIDERS.domain_config_api.get_security_compliance_config( - domain_id, group, option=option) + domain_id, group, option=option + ) def patch(self, domain_id=None, group=None, option=None): """Update domain config option. 
@@ -214,7 +222,8 @@ class DomainConfigBase(ks_flask.ResourceBase): PROVIDERS.resource_api.get_domain(domain_id) config = self.request_body_json.get('config', {}) ref = PROVIDERS.domain_config_api.update_config( - domain_id, config, group, option=option) + domain_id, config, group, option=option + ) return {self.member_key: ref} def delete(self, domain_id=None, group=None, option=None): @@ -227,7 +236,8 @@ class DomainConfigBase(ks_flask.ResourceBase): ENFORCER.enforce_call(action='identity:delete_domain_config') PROVIDERS.resource_api.get_domain(domain_id) PROVIDERS.domain_config_api.delete_config( - domain_id, group, option=option) + domain_id, group, option=option + ) return None, http.client.NO_CONTENT @@ -313,7 +323,8 @@ class DefaultConfigOptionResource(flask_restful.Resource): """ ENFORCER.enforce_call(action='identity:get_domain_config_default') ref = PROVIDERS.domain_config_api.get_config_default( - group=group, option=option) + group=group, option=option + ) return {'config': ref} @@ -325,12 +336,14 @@ class DomainUserListResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:list_grants', - build_target=_build_enforcement_target) + build_target=_build_enforcement_target, + ) refs = PROVIDERS.assignment_api.list_grants( - domain_id=domain_id, user_id=user_id, - inherited_to_projects=False) + domain_id=domain_id, user_id=user_id, inherited_to_projects=False + ) return ks_flask.ResourceBase.wrap_collection( - refs, collection_name='roles') + refs, collection_name='roles' + ) class DomainUserResource(ks_flask.ResourceBase): @@ -344,10 +357,14 @@ class DomainUserResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:check_grant', - build_target=_build_enforcement_target) + build_target=_build_enforcement_target, + ) PROVIDERS.assignment_api.get_grant( - role_id, domain_id=domain_id, user_id=user_id, - inherited_to_projects=False) + role_id, + domain_id=domain_id, + user_id=user_id, + inherited_to_projects=False, + ) 
return None, http.client.NO_CONTENT def put(self, domain_id=None, user_id=None, role_id=None): @@ -357,10 +374,15 @@ class DomainUserResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:create_grant', - build_target=_build_enforcement_target) + build_target=_build_enforcement_target, + ) PROVIDERS.assignment_api.create_grant( - role_id, domain_id=domain_id, user_id=user_id, - inherited_to_projects=False, initiator=self.audit_initiator) + role_id, + domain_id=domain_id, + user_id=user_id, + inherited_to_projects=False, + initiator=self.audit_initiator, + ) return None, http.client.NO_CONTENT def delete(self, domain_id=None, user_id=None, role_id=None): @@ -370,11 +392,17 @@ class DomainUserResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:revoke_grant', - build_target=functools.partial(_build_enforcement_target, - allow_non_existing=True)) + build_target=functools.partial( + _build_enforcement_target, allow_non_existing=True + ), + ) PROVIDERS.assignment_api.delete_grant( - role_id, domain_id=domain_id, user_id=user_id, - inherited_to_projects=False, initiator=self.audit_initiator) + role_id, + domain_id=domain_id, + user_id=user_id, + inherited_to_projects=False, + initiator=self.audit_initiator, + ) return None, http.client.NO_CONTENT @@ -386,12 +414,14 @@ class DomainGroupListResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:list_grants', - build_target=_build_enforcement_target) + build_target=_build_enforcement_target, + ) refs = PROVIDERS.assignment_api.list_grants( - domain_id=domain_id, group_id=group_id, - inherited_to_projects=False) + domain_id=domain_id, group_id=group_id, inherited_to_projects=False + ) return ks_flask.ResourceBase.wrap_collection( - refs, collection_name='roles') + refs, collection_name='roles' + ) class DomainGroupResource(ks_flask.ResourceBase): @@ -405,10 +435,14 @@ class DomainGroupResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( 
action='identity:check_grant', - build_target=_build_enforcement_target) + build_target=_build_enforcement_target, + ) PROVIDERS.assignment_api.get_grant( - role_id, domain_id=domain_id, group_id=group_id, - inherited_to_projects=False) + role_id, + domain_id=domain_id, + group_id=group_id, + inherited_to_projects=False, + ) return None, http.client.NO_CONTENT def put(self, domain_id=None, group_id=None, role_id=None): @@ -418,10 +452,15 @@ class DomainGroupResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:create_grant', - build_target=_build_enforcement_target) + build_target=_build_enforcement_target, + ) PROVIDERS.assignment_api.create_grant( - role_id, domain_id=domain_id, group_id=group_id, - inherited_to_projects=False, initiator=self.audit_initiator) + role_id, + domain_id=domain_id, + group_id=group_id, + inherited_to_projects=False, + initiator=self.audit_initiator, + ) return None, http.client.NO_CONTENT def delete(self, domain_id=None, group_id=None, role_id=None): @@ -431,11 +470,17 @@ class DomainGroupResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:revoke_grant', - build_target=functools.partial(_build_enforcement_target, - allow_non_existing=True)) + build_target=functools.partial( + _build_enforcement_target, allow_non_existing=True + ), + ) PROVIDERS.assignment_api.delete_grant( - role_id, domain_id=domain_id, group_id=group_id, - inherited_to_projects=False, initiator=self.audit_initiator) + role_id, + domain_id=domain_id, + group_id=group_id, + inherited_to_projects=False, + initiator=self.audit_initiator, + ) return None, http.client.NO_CONTENT @@ -451,8 +496,8 @@ class DomainAPI(ks_flask.APIBase): url=('/domains//config'), resource_kwargs={}, rel='domain_config', - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID}), + path_vars={'domain_id': json_home.Parameters.DOMAIN_ID}, + ), ks_flask.construct_resource_map( resource=DomainConfigGroupResource, url='/domains//config/', @@ -460,81 
+505,96 @@ class DomainAPI(ks_flask.APIBase): rel='domain_config_group', path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group': CONFIG_GROUP}), + 'group': CONFIG_GROUP, + }, + ), ks_flask.construct_resource_map( resource=DomainConfigOptionResource, - url=('/domains//config/' - '/'), + url=( + '/domains//config/' + '/' + ), resource_kwargs={}, rel='domain_config_option', path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, 'group': CONFIG_GROUP, - 'option': CONFIG_OPTION}), + 'option': CONFIG_OPTION, + }, + ), ks_flask.construct_resource_map( resource=DefaultConfigResource, url=('/domains/config/default'), resource_kwargs={}, rel='domain_config_default', - path_vars={}), + path_vars={}, + ), ks_flask.construct_resource_map( resource=DefaultConfigGroupResource, url='/domains/config//default', resource_kwargs={}, rel='domain_config_default_group', - path_vars={ - 'group': CONFIG_GROUP}), + path_vars={'group': CONFIG_GROUP}, + ), ks_flask.construct_resource_map( resource=DefaultConfigOptionResource, - url=('/domains/config/' - '//default'), + url=('/domains/config/' '//default'), resource_kwargs={}, rel='domain_config_default_option', - path_vars={ - 'group': CONFIG_GROUP, - 'option': CONFIG_OPTION}), + path_vars={'group': CONFIG_GROUP, 'option': CONFIG_OPTION}, + ), ks_flask.construct_resource_map( resource=DomainUserListResource, - url=('/domains//users' - '//roles'), + url=( + '/domains//users' '//roles' + ), resource_kwargs={}, rel='domain_user_roles', path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, 'user_id': json_home.Parameters.USER_ID, - }), + }, + ), ks_flask.construct_resource_map( resource=DomainUserResource, - url=('/domains//users' - '//roles/'), + url=( + '/domains//users' + '//roles/' + ), resource_kwargs={}, rel='domain_user_role', path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, 'user_id': json_home.Parameters.USER_ID, - 'role_id': json_home.Parameters.ROLE_ID - }), + 'role_id': json_home.Parameters.ROLE_ID, + }, + ), 
ks_flask.construct_resource_map( resource=DomainGroupListResource, - url=('/domains//groups' - '//roles'), + url=( + '/domains//groups' '//roles' + ), resource_kwargs={}, rel='domain_group_roles', path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, 'group_id': json_home.Parameters.GROUP_ID, - }), + }, + ), ks_flask.construct_resource_map( resource=DomainGroupResource, - url=('/domains//groups' - '//roles/'), + url=( + '/domains//groups' + '//roles/' + ), resource_kwargs={}, rel='domain_group_role', path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID - }) + 'role_id': json_home.Parameters.ROLE_ID, + }, + ), ] diff --git a/keystone/api/ec2tokens.py b/keystone/api/ec2tokens.py index d21673a031..52252f71f5 100644 --- a/keystone/api/ec2tokens.py +++ b/keystone/api/ec2tokens.py @@ -51,14 +51,12 @@ class EC2TokensResource(EC2_S3_Resource.ResourceBase): # other programming language libraries, JAVA for example. 
signer = ec2_utils.Ec2Signer(creds_ref['secret']) signature = signer.generate(credentials) - if utils.auth_str_equal( - credentials['signature'], signature): + if utils.auth_str_equal(credentials['signature'], signature): return True raise exception.Unauthorized(_('Invalid EC2 signature.')) # Raise the exception when credentials.get('signature') is None else: - raise exception.Unauthorized( - _('EC2 signature not supplied.')) + raise exception.Unauthorized(_('EC2 signature not supplied.')) @ks_flask.unenforced_api def post(self): @@ -86,7 +84,9 @@ class EC2TokensAPI(ks_flask.APIBase): resource_kwargs={}, rel='ec2tokens', resource_relation_func=( - json_home_relations.os_ec2_resource_rel_func)) + json_home_relations.os_ec2_resource_rel_func + ), + ) ] diff --git a/keystone/api/endpoints.py b/keystone/api/endpoints.py index 66496ded5b..87d89d7639 100644 --- a/keystone/api/endpoints.py +++ b/keystone/api/endpoints.py @@ -43,7 +43,8 @@ class EndpointResource(ks_flask.ResourceBase): collection_key = 'endpoints' member_key = 'endpoint' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='catalog_api', method='get_endpoint') + api='catalog_api', method='get_endpoint' + ) @staticmethod def _validate_endpoint_region(endpoint): @@ -54,8 +55,10 @@ class EndpointResource(ks_flask.ResourceBase): then for backward compatibility, we will auto-create the region. 
""" - if (endpoint.get('region_id') is None and - endpoint.get('region') is not None): + if ( + endpoint.get('region_id') is None + and endpoint.get('region') is not None + ): # To maintain backward compatibility with clients that are # using the v3 API in the same way as they used the v2 API, # create the endpoint region, if that region does not exist @@ -66,22 +69,26 @@ class EndpointResource(ks_flask.ResourceBase): except exception.RegionNotFound: region = dict(id=endpoint['region_id']) PROVIDERS.catalog_api.create_region( - region, initiator=notifications.build_audit_initiator()) + region, initiator=notifications.build_audit_initiator() + ) return endpoint def _get_endpoint(self, endpoint_id): ENFORCER.enforce_call(action='identity:get_endpoint') - return self.wrap_member(_filter_endpoint( - PROVIDERS.catalog_api.get_endpoint(endpoint_id))) + return self.wrap_member( + _filter_endpoint(PROVIDERS.catalog_api.get_endpoint(endpoint_id)) + ) def _list_endpoints(self): filters = ['interface', 'service_id', 'region_id'] - ENFORCER.enforce_call(action='identity:list_endpoints', - filters=filters) + ENFORCER.enforce_call( + action='identity:list_endpoints', filters=filters + ) hints = self.build_driver_hints(filters) refs = PROVIDERS.catalog_api.list_endpoints(hints=hints) - return self.wrap_collection([_filter_endpoint(r) for r in refs], - hints=hints) + return self.wrap_collection( + [_filter_endpoint(r) for r in refs], hints=hints + ) def get(self, endpoint_id=None): if endpoint_id is not None: @@ -96,7 +103,8 @@ class EndpointResource(ks_flask.ResourceBase): endpoint = self._assign_unique_id(self._normalize_dict(endpoint)) endpoint = self._validate_endpoint_region(endpoint) ref = PROVIDERS.catalog_api.create_endpoint( - endpoint['id'], endpoint, initiator=self.audit_initiator) + endpoint['id'], endpoint, initiator=self.audit_initiator + ) return self.wrap_member(_filter_endpoint(ref)), http.client.CREATED def patch(self, endpoint_id): @@ -106,13 +114,15 @@ class 
EndpointResource(ks_flask.ResourceBase): self._require_matching_id(endpoint) endpoint = self._validate_endpoint_region(endpoint) ref = PROVIDERS.catalog_api.update_endpoint( - endpoint_id, endpoint, initiator=self.audit_initiator) + endpoint_id, endpoint, initiator=self.audit_initiator + ) return self.wrap_member(_filter_endpoint(ref)) def delete(self, endpoint_id): ENFORCER.enforce_call(action='identity:delete_endpoint') - PROVIDERS.catalog_api.delete_endpoint(endpoint_id, - initiator=self.audit_initiator) + PROVIDERS.catalog_api.delete_endpoint( + endpoint_id, initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT @@ -121,9 +131,11 @@ class EndpointPolicyEndpointResource(flask_restful.Resource): ENFORCER.enforce_call(action='identity:get_policy_for_endpoint') PROVIDERS.catalog_api.get_endpoint(endpoint_id) ref = PROVIDERS.endpoint_policy_api.get_policy_for_endpoint( - endpoint_id) + endpoint_id + ) return ks_flask.ResourceBase.wrap_member( - ref, collection_name='endpoints', member_name='policy') + ref, collection_name='endpoints', member_name='policy' + ) class EndpointAPI(ks_flask.APIBase): @@ -137,7 +149,8 @@ class EndpointAPI(ks_flask.APIBase): resource_kwargs={}, rel='endpoint_policy', resource_relation_func=_resource_rel_func, - path_vars={'endpoint_id': json_home.Parameters.ENDPOINT_ID}) + path_vars={'endpoint_id': json_home.Parameters.ENDPOINT_ID}, + ) ] diff --git a/keystone/api/groups.py b/keystone/api/groups.py index f4283ba999..b80091f9ec 100644 --- a/keystone/api/groups.py +++ b/keystone/api/groups.py @@ -51,7 +51,8 @@ class GroupsResource(ks_flask.ResourceBase): collection_key = 'groups' member_key = 'group' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='identity_api', method='get_group') + api='identity_api', method='get_group' + ) def get(self, group_id=None): if group_id is not None: @@ -65,7 +66,7 @@ class GroupsResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:get_group', - 
build_target=_build_group_target_enforcement + build_target=_build_group_target_enforcement, ) return self.wrap_member(PROVIDERS.identity_api.get_group(group_id)) @@ -78,12 +79,14 @@ class GroupsResource(ks_flask.ResourceBase): target = None if self.oslo_context.domain_id: target = {'group': {'domain_id': self.oslo_context.domain_id}} - ENFORCER.enforce_call(action='identity:list_groups', filters=filters, - target_attr=target) + ENFORCER.enforce_call( + action='identity:list_groups', filters=filters, target_attr=target + ) hints = self.build_driver_hints(filters) domain = self._get_domain_id_for_list_request() - refs = PROVIDERS.identity_api.list_groups(domain_scope=domain, - hints=hints) + refs = PROVIDERS.identity_api.list_groups( + domain_scope=domain, hints=hints + ) if self.oslo_context.domain_id: filtered_refs = [] for ref in refs: @@ -106,7 +109,8 @@ class GroupsResource(ks_flask.ResourceBase): group = self._normalize_dict(group) group = self._normalize_domain_id(group) ref = PROVIDERS.identity_api.create_group( - group, initiator=self.audit_initiator) + group, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def patch(self, group_id): @@ -116,13 +120,14 @@ class GroupsResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:update_group', - build_target=_build_group_target_enforcement + build_target=_build_group_target_enforcement, ) group = self.request_body_json.get('group', {}) validation.lazy_validate(schema.group_update, group) self._require_matching_id(group) ref = PROVIDERS.identity_api.update_group( - group_id, group, initiator=self.audit_initiator) + group_id, group, initiator=self.audit_initiator + ) return self.wrap_member(ref) def delete(self, group_id): @@ -132,7 +137,8 @@ class GroupsResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call(action='identity:delete_group') PROVIDERS.identity_api.delete_group( - group_id, initiator=self.audit_initiator) + group_id, 
initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT @@ -151,19 +157,24 @@ class GroupUsersResource(ks_flask.ResourceBase): # data, leage target empty. This is the safest route and does not # leak data before enforcement happens. pass - ENFORCER.enforce_call(action='identity:list_users_in_group', - target_attr=target, filters=filters) + ENFORCER.enforce_call( + action='identity:list_users_in_group', + target_attr=target, + filters=filters, + ) hints = ks_flask.ResourceBase.build_driver_hints(filters) refs = PROVIDERS.identity_api.list_users_in_group( - group_id, hints=hints) - if (self.oslo_context.domain_id): + group_id, hints=hints + ) + if self.oslo_context.domain_id: filtered_refs = [] for ref in refs: if ref['domain_id'] == self.oslo_context.domain_id: filtered_refs.append(ref) refs = filtered_refs return ks_flask.ResourceBase.wrap_collection( - refs, hints=hints, collection_name='users') + refs, hints=hints, collection_name='users' + ) class UserGroupCRUDResource(flask_restful.Resource): @@ -191,8 +202,10 @@ class UserGroupCRUDResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:check_user_in_group', - build_target=functools.partial(self._build_enforcement_target_attr, - user_id, group_id)) + build_target=functools.partial( + self._build_enforcement_target_attr, user_id, group_id + ), + ) PROVIDERS.identity_api.check_user_in_group(user_id, group_id) return None, http.client.NO_CONTENT @@ -203,10 +216,13 @@ class UserGroupCRUDResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:add_user_to_group', - build_target=functools.partial(self._build_enforcement_target_attr, - user_id, group_id)) + build_target=functools.partial( + self._build_enforcement_target_attr, user_id, group_id + ), + ) PROVIDERS.identity_api.add_user_to_group( - user_id, group_id, initiator=notifications.build_audit_initiator()) + user_id, group_id, initiator=notifications.build_audit_initiator() + ) return None, 
http.client.NO_CONTENT def delete(self, group_id, user_id): @@ -216,10 +232,13 @@ class UserGroupCRUDResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:remove_user_from_group', - build_target=functools.partial(self._build_enforcement_target_attr, - user_id, group_id)) + build_target=functools.partial( + self._build_enforcement_target_attr, user_id, group_id + ), + ) PROVIDERS.identity_api.remove_user_from_group( - user_id, group_id, initiator=notifications.build_audit_initiator()) + user_id, group_id, initiator=notifications.build_audit_initiator() + ) return None, http.client.NO_CONTENT @@ -233,7 +252,8 @@ class GroupAPI(ks_flask.APIBase): url='/groups//users', resource_kwargs={}, rel='group_users', - path_vars={'group_id': json_home.Parameters.GROUP_ID}), + path_vars={'group_id': json_home.Parameters.GROUP_ID}, + ), ks_flask.construct_resource_map( resource=UserGroupCRUDResource, url='/groups//users/', @@ -241,7 +261,9 @@ class GroupAPI(ks_flask.APIBase): rel='group_user', path_vars={ 'group_id': json_home.Parameters.GROUP_ID, - 'user_id': json_home.Parameters.USER_ID}) + 'user_id': json_home.Parameters.USER_ID, + }, + ), ] diff --git a/keystone/api/limits.py b/keystone/api/limits.py index d8f94c8ea0..99653d6802 100644 --- a/keystone/api/limits.py +++ b/keystone/api/limits.py @@ -55,11 +55,17 @@ class LimitsResource(ks_flask.ResourceBase): member_key = 'limit' json_home_resource_status = json_home.Status.EXPERIMENTAL get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='unified_limit_api', method='get_limit') + api='unified_limit_api', method='get_limit' + ) def _list_limits(self): - filters = ['service_id', 'region_id', 'resource_name', 'project_id', - 'domain_id'] + filters = [ + 'service_id', + 'region_id', + 'resource_name', + 'project_id', + 'domain_id', + ] ENFORCER.enforce_call(action='identity:list_limits', filters=filters) @@ -90,8 +96,10 @@ class LimitsResource(ks_flask.ResourceBase): return 
self.wrap_collection(filtered_refs, hints=hints) def _get_limit(self, limit_id): - ENFORCER.enforce_call(action='identity:get_limit', - build_target=_build_limit_enforcement_target) + ENFORCER.enforce_call( + action='identity:get_limit', + build_target=_build_limit_enforcement_target, + ) ref = PROVIDERS.unified_limit_api.get_limit(limit_id) return self.wrap_member(ref) @@ -103,10 +111,13 @@ class LimitsResource(ks_flask.ResourceBase): def post(self): ENFORCER.enforce_call(action='identity:create_limits') limits_b = (flask.request.get_json(silent=True, force=True) or {}).get( - 'limits', {}) + 'limits', {} + ) validation.lazy_validate(schema.limit_create, limits_b) - limits = [self._assign_unique_id(self._normalize_dict(limit)) - for limit in limits_b] + limits = [ + self._assign_unique_id(self._normalize_dict(limit)) + for limit in limits_b + ] refs = PROVIDERS.unified_limit_api.create_limits(limits) refs = self.wrap_collection(refs) refs.pop('links') @@ -115,7 +126,8 @@ class LimitsResource(ks_flask.ResourceBase): def patch(self, limit_id): ENFORCER.enforce_call(action='identity:update_limit') limit = (flask.request.get_json(silent=True, force=True) or {}).get( - 'limit', {}) + 'limit', {} + ) validation.lazy_validate(schema.limit_update, limit) self._require_matching_id(limit) ref = PROVIDERS.unified_limit_api.update_limit(limit_id, limit) @@ -123,8 +135,10 @@ class LimitsResource(ks_flask.ResourceBase): def delete(self, limit_id): ENFORCER.enforce_call(action='identity:delete_limit') - return (PROVIDERS.unified_limit_api.delete_limit(limit_id), - http.client.NO_CONTENT) + return ( + PROVIDERS.unified_limit_api.delete_limit(limit_id), + http.client.NO_CONTENT, + ) class LimitModelResource(flask_restful.Resource): @@ -144,7 +158,7 @@ class LimitsAPI(ks_flask.APIBase): resource_kwargs={}, url='/limits/model', rel='limit_model', - status=json_home.Status.EXPERIMENTAL + status=json_home.Status.EXPERIMENTAL, ) ] diff --git a/keystone/api/os_ep_filter.py 
b/keystone/api/os_ep_filter.py index 055d21debb..1e4f3ee9d3 100644 --- a/keystone/api/os_ep_filter.py +++ b/keystone/api/os_ep_filter.py @@ -34,7 +34,8 @@ _build_resource_relation = json_home_relations.os_ep_filter_resource_rel_func _build_parameter_relation = json_home_relations.os_ep_filter_parameter_rel_func _ENDPOINT_GROUP_PARAMETER_RELATION = _build_parameter_relation( - parameter_name='endpoint_group_id') + parameter_name='endpoint_group_id' +) # NOTE(morgan): This is shared from keystone.api.endpoint, this is a special @@ -58,17 +59,20 @@ class EndpointGroupsResource(ks_flask.ResourceBase): if key not in valid_filter_keys: raise exception.ValidationError( attribute=' or '.join(valid_filter_keys), - target='endpoint_group') + target='endpoint_group', + ) def _get_endpoint_group(self, endpoint_group_id): ENFORCER.enforce_call(action='identity:get_endpoint_group') return self.wrap_member( - PROVIDERS.catalog_api.get_endpoint_group(endpoint_group_id)) + PROVIDERS.catalog_api.get_endpoint_group(endpoint_group_id) + ) def _list_endpoint_groups(self): - filters = ('name') - ENFORCER.enforce_call(action='identity:list_endpoint_groups', - filters=filters) + filters = 'name' + ENFORCER.enforce_call( + action='identity:list_endpoint_groups', filters=filters + ) hints = self.build_driver_hints(filters) refs = PROVIDERS.catalog_api.list_endpoint_groups(hints) return self.wrap_collection(refs, hints=hints) @@ -89,8 +93,14 @@ class EndpointGroupsResource(ks_flask.ResourceBase): raise exception.ValidationError(message=msg) self._require_valid_filter(ep_group) ep_group = self._assign_unique_id(ep_group) - return self.wrap_member(PROVIDERS.catalog_api.create_endpoint_group( - ep_group['id'], ep_group)), http.client.CREATED + return ( + self.wrap_member( + PROVIDERS.catalog_api.create_endpoint_group( + ep_group['id'], ep_group + ) + ), + http.client.CREATED, + ) def patch(self, endpoint_group_id): ENFORCER.enforce_call(action='identity:update_endpoint_group') @@ -99,13 +109,18 
@@ class EndpointGroupsResource(ks_flask.ResourceBase): if 'filters' in ep_group: self._require_valid_filter(ep_group) self._require_matching_id(ep_group) - return self.wrap_member(PROVIDERS.catalog_api.update_endpoint_group( - endpoint_group_id, ep_group)) + return self.wrap_member( + PROVIDERS.catalog_api.update_endpoint_group( + endpoint_group_id, ep_group + ) + ) def delete(self, endpoint_group_id): ENFORCER.enforce_call(action='identity:delete_endpoint_group') - return (PROVIDERS.catalog_api.delete_endpoint_group(endpoint_group_id), - http.client.NO_CONTENT) + return ( + PROVIDERS.catalog_api.delete_endpoint_group(endpoint_group_id), + http.client.NO_CONTENT, + ) class EPFilterEndpointProjectsResource(flask_restful.Resource): @@ -114,10 +129,13 @@ class EPFilterEndpointProjectsResource(flask_restful.Resource): ENFORCER.enforce_call(action='identity:list_projects_for_endpoint') PROVIDERS.catalog_api.get_endpoint(endpoint_id) refs = PROVIDERS.catalog_api.list_projects_for_endpoint(endpoint_id) - projects = [PROVIDERS.resource_api.get_project(ref['project_id']) - for ref in refs] + projects = [ + PROVIDERS.resource_api.get_project(ref['project_id']) + for ref in refs + ] return ks_flask.ResourceBase.wrap_collection( - projects, collection_name='projects') + projects, collection_name='projects' + ) class EPFilterProjectsEndpointsResource(flask_restful.Resource): @@ -126,7 +144,8 @@ class EPFilterProjectsEndpointsResource(flask_restful.Resource): PROVIDERS.catalog_api.get_endpoint(endpoint_id) PROVIDERS.resource_api.get_project(project_id) PROVIDERS.catalog_api.check_endpoint_in_project( - endpoint_id, project_id) + endpoint_id, project_id + ) return None, http.client.NO_CONTENT def put(self, project_id, endpoint_id): @@ -138,8 +157,12 @@ class EPFilterProjectsEndpointsResource(flask_restful.Resource): def delete(self, project_id, endpoint_id): ENFORCER.enforce_call(action='identity:remove_endpoint_from_project') - return 
(PROVIDERS.catalog_api.remove_endpoint_from_project( - endpoint_id, project_id), http.client.NO_CONTENT) + return ( + PROVIDERS.catalog_api.remove_endpoint_from_project( + endpoint_id, project_id + ), + http.client.NO_CONTENT, + ) class EPFilterProjectEndpointsListResource(flask_restful.Resource): @@ -147,49 +170,62 @@ class EPFilterProjectEndpointsListResource(flask_restful.Resource): ENFORCER.enforce_call(action='identity:list_endpoints_for_project') PROVIDERS.resource_api.get_project(project_id) filtered_endpoints = PROVIDERS.catalog_api.list_endpoints_for_project( - project_id) + project_id + ) return ks_flask.ResourceBase.wrap_collection( [_filter_endpoint(v) for v in filtered_endpoints.values()], - collection_name='endpoints') + collection_name='endpoints', + ) class EndpointFilterProjectEndpointGroupsListResource(flask_restful.Resource): def get(self, project_id): ENFORCER.enforce_call( - action='identity:list_endpoint_groups_for_project') + action='identity:list_endpoint_groups_for_project' + ) return EndpointGroupsResource.wrap_collection( - PROVIDERS.catalog_api.get_endpoint_groups_for_project(project_id)) + PROVIDERS.catalog_api.get_endpoint_groups_for_project(project_id) + ) class EndpointFilterEPGroupsProjects(flask_restful.Resource): def get(self, endpoint_group_id): ENFORCER.enforce_call( - action='identity:list_projects_associated_with_endpoint_group') - endpoint_group_refs = (PROVIDERS.catalog_api. 
- list_projects_associated_with_endpoint_group( - endpoint_group_id)) + action='identity:list_projects_associated_with_endpoint_group' + ) + endpoint_group_refs = ( + PROVIDERS.catalog_api.list_projects_associated_with_endpoint_group( + endpoint_group_id + ) + ) projects = [] for endpoint_group_ref in endpoint_group_refs: project = PROVIDERS.resource_api.get_project( - endpoint_group_ref['project_id']) + endpoint_group_ref['project_id'] + ) if project: projects.append(project) return ks_flask.ResourceBase.wrap_collection( - projects, collection_name='projects') + projects, collection_name='projects' + ) class EndpointFilterEPGroupsEndpoints(flask_restful.Resource): def get(self, endpoint_group_id): ENFORCER.enforce_call( - action='identity:list_endpoints_associated_with_endpoint_group') - filtered_endpoints = (PROVIDERS.catalog_api. - get_endpoints_filtered_by_endpoint_group( - endpoint_group_id)) + action='identity:list_endpoints_associated_with_endpoint_group' + ) + filtered_endpoints = ( + PROVIDERS.catalog_api.get_endpoints_filtered_by_endpoint_group( + endpoint_group_id + ) + ) return ks_flask.ResourceBase.wrap_collection( [_filter_endpoint(e) for e in filtered_endpoints], - collection_name='endpoints') + collection_name='endpoints', + ) class EPFilterGroupsProjectsResource(ks_flask.ResourceBase): @@ -198,10 +234,14 @@ class EPFilterGroupsProjectsResource(ks_flask.ResourceBase): @classmethod def _add_self_referential_link(cls, ref, collection_name=None): - url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/projects/%(project_id)s' % { - 'endpoint_group_id': ref['endpoint_group_id'], - 'project_id': ref['project_id']}) + url = ( + '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' + '/projects/%(project_id)s' + % { + 'endpoint_group_id': ref['endpoint_group_id'], + 'project_id': ref['project_id'], + } + ) ref.setdefault('links', {}) ref['links']['self'] = url @@ -210,7 +250,8 @@ class EPFilterGroupsProjectsResource(ks_flask.ResourceBase): 
PROVIDERS.resource_api.get_project(project_id) PROVIDERS.catalog_api.get_endpoint_group(endpoint_group_id) ref = PROVIDERS.catalog_api.get_endpoint_group_in_project( - endpoint_group_id, project_id) + endpoint_group_id, project_id + ) return self.wrap_member(ref) def put(self, endpoint_group_id, project_id): @@ -218,16 +259,19 @@ class EPFilterGroupsProjectsResource(ks_flask.ResourceBase): PROVIDERS.resource_api.get_project(project_id) PROVIDERS.catalog_api.get_endpoint_group(endpoint_group_id) PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group_id, project_id) + endpoint_group_id, project_id + ) return None, http.client.NO_CONTENT def delete(self, endpoint_group_id, project_id): ENFORCER.enforce_call( - action='identity:remove_endpoint_group_from_project') + action='identity:remove_endpoint_group_from_project' + ) PROVIDERS.resource_api.get_project(project_id) PROVIDERS.catalog_api.get_endpoint_group(endpoint_group_id) PROVIDERS.catalog_api.remove_endpoint_group_from_project( - endpoint_group_id, project_id) + endpoint_group_id, project_id + ) return None, http.client.NO_CONTENT @@ -243,9 +287,8 @@ class EPFilterAPI(ks_flask.APIBase): resource_kwargs={}, rel='endpoint_projects', resource_relation_func=_build_resource_relation, - path_vars={ - 'endpoint_id': json_home.Parameters.ENDPOINT_ID - }), + path_vars={'endpoint_id': json_home.Parameters.ENDPOINT_ID}, + ), ks_flask.construct_resource_map( resource=EPFilterProjectsEndpointsResource, url='/projects//endpoints/', @@ -254,21 +297,25 @@ class EPFilterAPI(ks_flask.APIBase): resource_relation_func=_build_resource_relation, path_vars={ 'endpoint_id': json_home.Parameters.ENDPOINT_ID, - 'project_id': json_home.Parameters.PROJECT_ID}), + 'project_id': json_home.Parameters.PROJECT_ID, + }, + ), ks_flask.construct_resource_map( resource=EPFilterProjectEndpointsListResource, url='/projects//endpoints', resource_kwargs={}, rel='project_endpoints', resource_relation_func=_build_resource_relation, - 
path_vars={'project_id': json_home.Parameters.PROJECT_ID}), + path_vars={'project_id': json_home.Parameters.PROJECT_ID}, + ), ks_flask.construct_resource_map( resource=EndpointFilterProjectEndpointGroupsListResource, url='/projects//endpoint_groups', resource_kwargs={}, rel='project_endpoint_groups', resource_relation_func=_build_resource_relation, - path_vars={'project_id': json_home.Parameters.PROJECT_ID}), + path_vars={'project_id': json_home.Parameters.PROJECT_ID}, + ), ks_flask.construct_resource_map( resource=EndpointFilterEPGroupsEndpoints, url='/endpoint_groups//endpoints', @@ -276,7 +323,9 @@ class EPFilterAPI(ks_flask.APIBase): rel='endpoints_in_endpoint_group', resource_relation_func=_build_resource_relation, path_vars={ - 'endpoint_group_id': _ENDPOINT_GROUP_PARAMETER_RELATION}), + 'endpoint_group_id': _ENDPOINT_GROUP_PARAMETER_RELATION + }, + ), ks_flask.construct_resource_map( resource=EndpointFilterEPGroupsProjects, url='/endpoint_groups//projects', @@ -284,17 +333,23 @@ class EPFilterAPI(ks_flask.APIBase): rel='projects_associated_with_endpoint_group', resource_relation_func=_build_resource_relation, path_vars={ - 'endpoint_group_id': _ENDPOINT_GROUP_PARAMETER_RELATION}), + 'endpoint_group_id': _ENDPOINT_GROUP_PARAMETER_RELATION + }, + ), ks_flask.construct_resource_map( resource=EPFilterGroupsProjectsResource, - url=('/endpoint_groups//projects/' - ''), + url=( + '/endpoint_groups//projects/' + '' + ), resource_kwargs={}, rel='endpoint_group_to_project_association', resource_relation_func=_build_resource_relation, - path_vars={'project_id': json_home.Parameters.PROJECT_ID, - 'endpoint_group_id': _ENDPOINT_GROUP_PARAMETER_RELATION - }), + path_vars={ + 'project_id': json_home.Parameters.PROJECT_ID, + 'endpoint_group_id': _ENDPOINT_GROUP_PARAMETER_RELATION, + }, + ), ] diff --git a/keystone/api/os_federation.py b/keystone/api/os_federation.py index 742962229b..01eecf911e 100644 --- a/keystone/api/os_federation.py +++ b/keystone/api/os_federation.py 
@@ -43,7 +43,8 @@ _build_resource_relation = json_home_relations.os_federation_resource_rel_func IDP_ID_PARAMETER_RELATION = _build_param_relation(parameter_name='idp_id') PROTOCOL_ID_PARAMETER_RELATION = _build_param_relation( - parameter_name='protocol_id') + parameter_name='protocol_id' +) SP_ID_PARAMETER_RELATION = _build_param_relation(parameter_name='sp_id') @@ -76,10 +77,17 @@ class IdentityProvidersResource(_ResourceBase): collection_key = 'identity_providers' member_key = 'identity_provider' api_prefix = '/OS-FEDERATION' - _public_parameters = frozenset(['id', 'enabled', 'description', - 'remote_ids', 'links', 'domain_id', - 'authorization_ttl' - ]) + _public_parameters = frozenset( + [ + 'id', + 'enabled', + 'description', + 'remote_ids', + 'links', + 'domain_id', + 'authorization_ttl', + ] + ) _id_path_param_name_override = 'idp_id' @staticmethod @@ -117,8 +125,9 @@ class IdentityProvidersResource(_ResourceBase): GET/HEAD /OS-FEDERATION/identity_providers """ filters = ['id', 'enabled'] - ENFORCER.enforce_call(action='identity:list_identity_providers', - filters=filters) + ENFORCER.enforce_call( + action='identity:list_identity_providers', filters=filters + ) hints = self.build_driver_hints(filters) refs = PROVIDERS.federation_api.list_idps(hints=hints) refs = [self.filter_params(r) for r in refs] @@ -135,12 +144,10 @@ class IdentityProvidersResource(_ResourceBase): """ ENFORCER.enforce_call(action='identity:create_identity_provider') idp = self.request_body_json.get('identity_provider', {}) - validation.lazy_validate(schema.identity_provider_create, - idp) + validation.lazy_validate(schema.identity_provider_create, idp) idp = self._normalize_dict(idp) idp.setdefault('enabled', False) - idp_ref = PROVIDERS.federation_api.create_idp( - idp_id, idp) + idp_ref = PROVIDERS.federation_api.create_idp(idp_id, idp) return self.wrap_member(idp_ref), http.client.CREATED def patch(self, idp_id): @@ -148,8 +155,7 @@ class IdentityProvidersResource(_ResourceBase): 
idp = self.request_body_json.get('identity_provider', {}) validation.lazy_validate(schema.identity_provider_update, idp) idp = self._normalize_dict(idp) - idp_ref = PROVIDERS.federation_api.update_idp( - idp_id, idp) + idp_ref = PROVIDERS.federation_api.update_idp(idp_id, idp) return self.wrap_member(idp_ref) def delete(self, idp_id): @@ -162,8 +168,7 @@ class _IdentityProvidersProtocolsResourceBase(_ResourceBase): collection_key = 'protocols' member_key = 'protocol' _public_parameters = frozenset(['id', 'mapping_id', 'links']) - json_home_additional_parameters = { - 'idp_id': IDP_ID_PARAMETER_RELATION} + json_home_additional_parameters = {'idp_id': IDP_ID_PARAMETER_RELATION} json_home_collection_resource_name_override = 'identity_provider_protocols' json_home_member_resource_name_override = 'identity_provider_protocol' @@ -179,7 +184,8 @@ class _IdentityProvidersProtocolsResourceBase(_ResourceBase): """ ref.setdefault('links', {}) ref['links']['identity_provider'] = ks_flask.base_url( - path=ref['idp_id']) + path=ref['idp_id'] + ) class IDPProtocolsListResource(_IdentityProvidersProtocolsResourceBase): @@ -220,8 +226,9 @@ class IDPProtocolsCRUDResource(_IdentityProvidersProtocolsResourceBase): protocol = self.request_body_json.get('protocol', {}) validation.lazy_validate(schema.protocol_create, protocol) protocol = self._normalize_dict(protocol) - ref = PROVIDERS.federation_api.create_protocol(idp_id, protocol_id, - protocol) + ref = PROVIDERS.federation_api.create_protocol( + idp_id, protocol_id, protocol + ) return self.wrap_member(ref), http.client.CREATED def patch(self, idp_id, protocol_id): @@ -233,8 +240,9 @@ class IDPProtocolsCRUDResource(_IdentityProvidersProtocolsResourceBase): ENFORCER.enforce_call(action='identity:update_protocol') protocol = self.request_body_json.get('protocol', {}) validation.lazy_validate(schema.protocol_update, protocol) - ref = PROVIDERS.federation_api.update_protocol(idp_id, protocol_id, - protocol) + ref = 
PROVIDERS.federation_api.update_protocol( + idp_id, protocol_id, protocol + ) return self.wrap_member(ref) def delete(self, idp_id, protocol_id): @@ -264,8 +272,9 @@ class MappingResource(_ResourceBase): HEAD/GET /OS-FEDERATION/mappings/{mapping_id} """ ENFORCER.enforce_call(action='identity:get_mapping') - return self.wrap_member(PROVIDERS.federation_api.get_mapping( - mapping_id)) + return self.wrap_member( + PROVIDERS.federation_api.get_mapping(mapping_id) + ) def _list_mappings(self): """List mappings. @@ -276,22 +285,28 @@ class MappingResource(_ResourceBase): return self.wrap_collection(PROVIDERS.federation_api.list_mappings()) def _internal_normalize_and_validate_attribute_mapping( - self, action_executed_message="created"): + self, action_executed_message="created" + ): mapping = self.request_body_json.get('mapping', {}) mapping = self._normalize_dict(mapping) if not mapping.get('schema_version'): - default_schema_version =\ + default_schema_version = ( utils.get_default_attribute_mapping_schema_version() - LOG.debug("A mapping [%s] was %s without providing a " - "'schema_version'; therefore, we need to set one. The " - "current default is [%s]. We will use this value for " - "the attribute mapping being registered. It is " - "recommended that one does not rely on this default " - "value, as it can change, and the already persisted " - "attribute mappings will remain with the previous " - "default values.", mapping, action_executed_message, - default_schema_version) + ) + LOG.debug( + "A mapping [%s] was %s without providing a " + "'schema_version'; therefore, we need to set one. The " + "current default is [%s]. We will use this value for " + "the attribute mapping being registered. 
It is " + "recommended that one does not rely on this default " + "value, as it can change, and the already persisted " + "attribute mappings will remain with the previous " + "default values.", + mapping, + action_executed_message, + default_schema_version, + ) mapping['schema_version'] = default_schema_version utils.validate_mapping_structure(mapping) return mapping @@ -304,7 +319,8 @@ class MappingResource(_ResourceBase): ENFORCER.enforce_call(action='identity:create_mapping') am = self._internal_normalize_and_validate_attribute_mapping( - "registered") + "registered" + ) mapping_ref = PROVIDERS.federation_api.create_mapping(mapping_id, am) return self.wrap_member(mapping_ref), http.client.CREATED @@ -334,8 +350,17 @@ class MappingResource(_ResourceBase): class ServiceProvidersResource(_ResourceBase): collection_key = 'service_providers' member_key = 'service_provider' - _public_parameters = frozenset(['auth_url', 'id', 'enabled', 'description', - 'links', 'relay_state_prefix', 'sp_url']) + _public_parameters = frozenset( + [ + 'auth_url', + 'id', + 'enabled', + 'description', + 'links', + 'relay_state_prefix', + 'sp_url', + ] + ) _id_path_param_name_override = 'sp_id' api_prefix = '/OS-FEDERATION' @@ -358,12 +383,14 @@ class ServiceProvidersResource(_ResourceBase): GET/HEAD /OS-FEDERATION/service_providers """ filters = ['id', 'enabled'] - ENFORCER.enforce_call(action='identity:list_service_providers', - filters=filters) + ENFORCER.enforce_call( + action='identity:list_service_providers', filters=filters + ) hints = self.build_driver_hints(filters) - refs = [self.filter_params(r) - for r in - PROVIDERS.federation_api.list_sps(hints=hints)] + refs = [ + self.filter_params(r) + for r in PROVIDERS.federation_api.list_sps(hints=hints) + ] return self.wrap_collection(refs, hints=hints) def put(self, sp_id): @@ -452,7 +479,7 @@ class OSFederationAuthResource(flask_restful.Resource): 'methods': [protocol_id], protocol_id: { 'identity_provider': idp_id, - 'protocol': 
protocol_id + 'protocol': protocol_id, }, } } @@ -476,17 +503,22 @@ class OSFederationAPI(ks_flask.APIBase): url='/saml2/metadata', resource_kwargs={}, rel='metadata', - resource_relation_func=_build_resource_relation), + resource_relation_func=_build_resource_relation, + ), ks_flask.construct_resource_map( resource=OSFederationAuthResource, - url=('/identity_providers//protocols/' - '/auth'), + url=( + '/identity_providers//protocols/' + '/auth' + ), resource_kwargs={}, rel='identity_provider_protocol_auth', resource_relation_func=_build_resource_relation, path_vars={ 'idp_id': IDP_ID_PARAMETER_RELATION, - 'protocol_id': PROTOCOL_ID_PARAMETER_RELATION}), + 'protocol_id': PROTOCOL_ID_PARAMETER_RELATION, + }, + ), ] @@ -505,15 +537,17 @@ class OSFederationIdentityProvidersProtocolsAPI(ks_flask.APIBase): resource_mapping = [ ks_flask.construct_resource_map( resource=IDPProtocolsCRUDResource, - url=('/OS-FEDERATION/identity_providers//protocols/' - ''), + url=( + '/OS-FEDERATION/identity_providers//protocols/' + '' + ), resource_kwargs={}, rel='identity_provider_protocol', resource_relation_func=_build_resource_relation, path_vars={ 'idp_id': IDP_ID_PARAMETER_RELATION, - 'protocol_id': PROTOCOL_ID_PARAMETER_RELATION - } + 'protocol_id': PROTOCOL_ID_PARAMETER_RELATION, + }, ), ks_flask.construct_resource_map( resource=IDPProtocolsListResource, @@ -521,9 +555,7 @@ class OSFederationIdentityProvidersProtocolsAPI(ks_flask.APIBase): resource_kwargs={}, rel='identity_provider_protocols', resource_relation_func=_build_resource_relation, - path_vars={ - 'idp_id': IDP_ID_PARAMETER_RELATION - } + path_vars={'idp_id': IDP_ID_PARAMETER_RELATION}, ), ] @@ -549,5 +581,5 @@ APIs = ( OSFederationIdentityProvidersAPI, OSFederationIdentityProvidersProtocolsAPI, OSFederationMappingsAPI, - OSFederationServiceProvidersAPI + OSFederationServiceProvidersAPI, ) diff --git a/keystone/api/os_inherit.py b/keystone/api/os_inherit.py index 33503cefa0..a0ff948d6b 100644 --- 
a/keystone/api/os_inherit.py +++ b/keystone/api/os_inherit.py @@ -32,9 +32,14 @@ LOG = log.getLogger(__name__) _build_resource_relation = json_home_relations.os_inherit_resource_rel_func -def _build_enforcement_target_attr(role_id=None, user_id=None, group_id=None, - project_id=None, domain_id=None, - allow_non_existing=False): +def _build_enforcement_target_attr( + role_id=None, + user_id=None, + group_id=None, + project_id=None, + domain_id=None, + allow_non_existing=False, +): """Check protection for role grant APIs. The policy rule might want to inspect attributes of any of the entities @@ -59,25 +64,33 @@ def _build_enforcement_target_attr(role_id=None, user_id=None, group_id=None, try: target['role'] = PROVIDERS.role_api.get_role(role_id) except exception.RoleNotFound: - LOG.info('Role (%(role_id)s) not found, Enforcement target of ' - '`role` remaind empty', {'role_id': role_id}) + LOG.info( + 'Role (%(role_id)s) not found, Enforcement target of ' + '`role` remaind empty', + {'role_id': role_id}, + ) target['role'] = {} if user_id: try: target['user'] = PROVIDERS.identity_api.get_user(user_id) except exception.UserNotFound: if not allow_non_existing: - LOG.info('User (%(user_id)s) was not found. Enforcement target' - ' of `user` remains empty.', {'user_id': user_id}) + LOG.info( + 'User (%(user_id)s) was not found. Enforcement target' + ' of `user` remains empty.', + {'user_id': user_id}, + ) target['user'] = {} else: try: target['group'] = PROVIDERS.identity_api.get_group(group_id) except exception.GroupNotFound: if not allow_non_existing: - LOG.info('Group (%(group_id)s) was not found. Enforcement ' - 'target of `group` remains empty.', - {'group_id': group_id}) + LOG.info( + 'Group (%(group_id)s) was not found. 
Enforcement ' + 'target of `group` remains empty.', + {'group_id': group_id}, + ) target['group'] = {} # NOTE(lbragstad): This if/else check will need to be expanded in the @@ -86,17 +99,21 @@ def _build_enforcement_target_attr(role_id=None, user_id=None, group_id=None, try: target['domain'] = PROVIDERS.resource_api.get_domain(domain_id) except exception.DomainNotFound: - LOG.info('Domain (%(domain_id)s) was not found. Enforcement ' - 'target of `domain` remains empty.', - {'domain_id': domain_id}) + LOG.info( + 'Domain (%(domain_id)s) was not found. Enforcement ' + 'target of `domain` remains empty.', + {'domain_id': domain_id}, + ) target['domain'] = {} elif project_id: try: target['project'] = PROVIDERS.resource_api.get_project(project_id) except exception.ProjectNotFound: - LOG.info('Project (%(project_id)s) was not found. Enforcement ' - 'target of `project` remains empty.', - {'project_id': project_id}) + LOG.info( + 'Project (%(project_id)s) was not found. Enforcement ' + 'target of `project` remains empty.', + {'project_id': project_id}, + ) target['project'] = {} return target @@ -111,13 +128,19 @@ class OSInheritDomainGroupRolesResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:check_grant', - build_target=functools.partial(_build_enforcement_target_attr, - domain_id=domain_id, - group_id=group_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + domain_id=domain_id, + group_id=group_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.get_grant( - domain_id=domain_id, group_id=group_id, role_id=role_id, - inherited_to_projects=True) + domain_id=domain_id, + group_id=group_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT def put(self, domain_id, group_id, role_id): @@ -128,13 +151,19 @@ class OSInheritDomainGroupRolesResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:create_grant', - 
build_target=functools.partial(_build_enforcement_target_attr, - domain_id=domain_id, - group_id=group_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + domain_id=domain_id, + group_id=group_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.create_grant( - domain_id=domain_id, group_id=group_id, role_id=role_id, - inherited_to_projects=True) + domain_id=domain_id, + group_id=group_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT def delete(self, domain_id, group_id, role_id): @@ -145,13 +174,19 @@ class OSInheritDomainGroupRolesResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:revoke_grant', - build_target=functools.partial(_build_enforcement_target_attr, - domain_id=domain_id, - group_id=group_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + domain_id=domain_id, + group_id=group_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.delete_grant( - domain_id=domain_id, group_id=group_id, role_id=role_id, - inherited_to_projects=True) + domain_id=domain_id, + group_id=group_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT @@ -164,13 +199,18 @@ class OSInheritDomainGroupRolesListResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:list_grants', - build_target=functools.partial(_build_enforcement_target_attr, - domain_id=domain_id, - group_id=group_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + domain_id=domain_id, + group_id=group_id, + ), + ) refs = PROVIDERS.assignment_api.list_grants( - domain_id=domain_id, group_id=group_id, inherited_to_projects=True) + domain_id=domain_id, group_id=group_id, inherited_to_projects=True + ) return ks_flask.ResourceBase.wrap_collection( - refs, collection_name='roles') + refs, collection_name='roles' + ) class OSInheritDomainUserRolesResource(flask_restful.Resource): 
@@ -182,13 +222,19 @@ class OSInheritDomainUserRolesResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:check_grant', - build_target=functools.partial(_build_enforcement_target_attr, - domain_id=domain_id, - user_id=user_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + domain_id=domain_id, + user_id=user_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.get_grant( - domain_id=domain_id, user_id=user_id, role_id=role_id, - inherited_to_projects=True) + domain_id=domain_id, + user_id=user_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT def put(self, domain_id, user_id, role_id): @@ -199,13 +245,19 @@ class OSInheritDomainUserRolesResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:create_grant', - build_target=functools.partial(_build_enforcement_target_attr, - domain_id=domain_id, - user_id=user_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + domain_id=domain_id, + user_id=user_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.create_grant( - domain_id=domain_id, user_id=user_id, role_id=role_id, - inherited_to_projects=True) + domain_id=domain_id, + user_id=user_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT def delete(self, domain_id, user_id, role_id): @@ -216,13 +268,19 @@ class OSInheritDomainUserRolesResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:revoke_grant', - build_target=functools.partial(_build_enforcement_target_attr, - domain_id=domain_id, - user_id=user_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + domain_id=domain_id, + user_id=user_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.delete_grant( - domain_id=domain_id, user_id=user_id, role_id=role_id, - inherited_to_projects=True) + domain_id=domain_id, + user_id=user_id, 
+ role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT @@ -235,13 +293,18 @@ class OSInheritDomainUserRolesListResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:list_grants', - build_target=functools.partial(_build_enforcement_target_attr, - domain_id=domain_id, - user_id=user_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + domain_id=domain_id, + user_id=user_id, + ), + ) refs = PROVIDERS.assignment_api.list_grants( - domain_id=domain_id, user_id=user_id, inherited_to_projects=True) + domain_id=domain_id, user_id=user_id, inherited_to_projects=True + ) return ks_flask.ResourceBase.wrap_collection( - refs, collection_name='roles') + refs, collection_name='roles' + ) class OSInheritProjectUserResource(flask_restful.Resource): @@ -253,13 +316,19 @@ class OSInheritProjectUserResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:check_grant', - build_target=functools.partial(_build_enforcement_target_attr, - project_id=project_id, - user_id=user_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + project_id=project_id, + user_id=user_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.get_grant( - project_id=project_id, user_id=user_id, role_id=role_id, - inherited_to_projects=True) + project_id=project_id, + user_id=user_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT def put(self, project_id, user_id, role_id): @@ -270,13 +339,19 @@ class OSInheritProjectUserResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:create_grant', - build_target=functools.partial(_build_enforcement_target_attr, - project_id=project_id, - user_id=user_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + project_id=project_id, + user_id=user_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.create_grant( - 
project_id=project_id, user_id=user_id, role_id=role_id, - inherited_to_projects=True) + project_id=project_id, + user_id=user_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT def delete(self, project_id, user_id, role_id): @@ -287,13 +362,19 @@ class OSInheritProjectUserResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:revoke_grant', - build_target=functools.partial(_build_enforcement_target_attr, - project_id=project_id, - user_id=user_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + project_id=project_id, + user_id=user_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.delete_grant( - project_id=project_id, user_id=user_id, role_id=role_id, - inherited_to_projects=True) + project_id=project_id, + user_id=user_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT @@ -306,13 +387,19 @@ class OSInheritProjectGroupResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:check_grant', - build_target=functools.partial(_build_enforcement_target_attr, - project_id=project_id, - group_id=group_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + project_id=project_id, + group_id=group_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.get_grant( - project_id=project_id, group_id=group_id, role_id=role_id, - inherited_to_projects=True) + project_id=project_id, + group_id=group_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT def put(self, project_id, group_id, role_id): @@ -323,13 +410,19 @@ class OSInheritProjectGroupResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:create_grant', - build_target=functools.partial(_build_enforcement_target_attr, - project_id=project_id, - group_id=group_id, - role_id=role_id)) + build_target=functools.partial( + 
_build_enforcement_target_attr, + project_id=project_id, + group_id=group_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.create_grant( - project_id=project_id, group_id=group_id, role_id=role_id, - inherited_to_projects=True) + project_id=project_id, + group_id=group_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT def delete(self, project_id, group_id, role_id): @@ -340,13 +433,19 @@ class OSInheritProjectGroupResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:revoke_grant', - build_target=functools.partial(_build_enforcement_target_attr, - project_id=project_id, - group_id=group_id, - role_id=role_id)) + build_target=functools.partial( + _build_enforcement_target_attr, + project_id=project_id, + group_id=group_id, + role_id=role_id, + ), + ) PROVIDERS.assignment_api.delete_grant( - project_id=project_id, group_id=group_id, role_id=role_id, - inherited_to_projects=True) + project_id=project_id, + group_id=group_id, + role_id=role_id, + inherited_to_projects=True, + ) return None, http.client.NO_CONTENT @@ -358,68 +457,92 @@ class OSInheritAPI(ks_flask.APIBase): resource_mapping = [ ks_flask.construct_resource_map( resource=OSInheritDomainGroupRolesResource, - url=('/domains//groups//roles' - '//inherited_to_projects'), + url=( + '/domains//groups//roles' + '//inherited_to_projects' + ), resource_kwargs={}, rel='domain_group_role_inherited_to_projects', resource_relation_func=_build_resource_relation, path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID}), + 'role_id': json_home.Parameters.ROLE_ID, + }, + ), ks_flask.construct_resource_map( resource=OSInheritDomainGroupRolesListResource, - url=('/domains//groups//roles' - '/inherited_to_projects'), + url=( + '/domains//groups//roles' + '/inherited_to_projects' + ), resource_kwargs={}, rel='domain_group_roles_inherited_to_projects', 
resource_relation_func=_build_resource_relation, path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID}), + 'group_id': json_home.Parameters.GROUP_ID, + }, + ), ks_flask.construct_resource_map( resource=OSInheritDomainUserRolesResource, - url=('/domains//users//roles' - '//inherited_to_projects'), + url=( + '/domains//users//roles' + '//inherited_to_projects' + ), resource_kwargs={}, rel='domain_user_role_inherited_to_projects', resource_relation_func=_build_resource_relation, path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, 'user_id': json_home.Parameters.USER_ID, - 'role_id': json_home.Parameters.ROLE_ID}), + 'role_id': json_home.Parameters.ROLE_ID, + }, + ), ks_flask.construct_resource_map( resource=OSInheritDomainUserRolesListResource, - url=('/domains//users//roles' - '/inherited_to_projects'), + url=( + '/domains//users//roles' + '/inherited_to_projects' + ), resource_kwargs={}, rel='domain_user_roles_inherited_to_projects', resource_relation_func=_build_resource_relation, path_vars={ 'domain_id': json_home.Parameters.DOMAIN_ID, - 'user_id': json_home.Parameters.USER_ID}), + 'user_id': json_home.Parameters.USER_ID, + }, + ), ks_flask.construct_resource_map( resource=OSInheritProjectUserResource, - url=('projects//users//roles' - '//inherited_to_projects'), + url=( + 'projects//users//roles' + '//inherited_to_projects' + ), resource_kwargs={}, rel='project_user_role_inherited_to_projects', resource_relation_func=_build_resource_relation, path_vars={ 'project_id': json_home.Parameters.PROJECT_ID, 'user_id': json_home.Parameters.USER_ID, - 'role_id': json_home.Parameters.ROLE_ID}), + 'role_id': json_home.Parameters.ROLE_ID, + }, + ), ks_flask.construct_resource_map( resource=OSInheritProjectGroupResource, - url=('projects//groups//roles' - '//inherited_to_projects'), + url=( + 'projects//groups//roles' + '//inherited_to_projects' + ), resource_kwargs={}, rel='project_group_role_inherited_to_projects', 
resource_relation_func=_build_resource_relation, path_vars={ 'project_id': json_home.Parameters.PROJECT_ID, 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID}) + 'role_id': json_home.Parameters.ROLE_ID, + }, + ), ] diff --git a/keystone/api/os_oauth1.py b/keystone/api/os_oauth1.py index 615d29c516..b31a6fa2b4 100644 --- a/keystone/api/os_oauth1.py +++ b/keystone/api/os_oauth1.py @@ -46,7 +46,8 @@ _build_resource_relation = json_home_relations.os_oauth1_resource_rel_func _build_parameter_relation = json_home_relations.os_oauth1_parameter_rel_func _ACCESS_TOKEN_ID_PARAMETER_RELATION = _build_parameter_relation( - parameter_name='access_token_id') + parameter_name='access_token_id' +) def _normalize_role_list(authorize_roles): @@ -55,8 +56,9 @@ def _normalize_role_list(authorize_roles): if role.get('id'): roles.add(role['id']) else: - roles.add(PROVIDERS.role_api.get_unique_role_by_name( - role['name'])['id']) + roles.add( + PROVIDERS.role_api.get_unique_role_by_name(role['name'])['id'] + ) return roles @@ -102,12 +104,14 @@ class ConsumerResource(ks_flask.ResourceBase): def post(self): ENFORCER.enforce_call(action='identity:create_consumer') consumer = (flask.request.get_json(force=True, silent=True) or {}).get( - 'consumer', {}) + 'consumer', {} + ) consumer = self._normalize_dict(consumer) validation.lazy_validate(schema.consumer_create, consumer) consumer = self._assign_unique_id(consumer) ref = PROVIDERS.oauth_api.create_consumer( - consumer, initiator=self.audit_initiator) + consumer, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def delete(self, consumer_id): @@ -116,23 +120,25 @@ class ConsumerResource(ks_flask.ResourceBase): 'Invalidating token cache because consumer %(consumer_id)s has ' 'been deleted. Authorization for users with OAuth tokens will be ' 'recalculated and enforced accordingly the next time they ' - 'authenticate or validate a token.' 
% - {'consumer_id': consumer_id} + 'authenticate or validate a token.' % {'consumer_id': consumer_id} ) notifications.invalidate_token_cache_notification(reason) PROVIDERS.oauth_api.delete_consumer( - consumer_id, initiator=self.audit_initiator) + consumer_id, initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT def patch(self, consumer_id): ENFORCER.enforce_call(action='identity:update_consumer') consumer = (flask.request.get_json(force=True, silent=True) or {}).get( - 'consumer', {}) + 'consumer', {} + ) validation.lazy_validate(schema.consumer_update, consumer) consumer = self._normalize_dict(consumer) self._require_matching_id(consumer) ref = PROVIDERS.oauth_api.update_consumer( - consumer_id, consumer, initiator=self.audit_initiator) + consumer_id, consumer, initiator=self.audit_initiator + ) return self.wrap_member(ref) @@ -142,14 +148,17 @@ class RequestTokenResource(_OAuth1ResourceBase): oauth_headers = oauth1.get_oauth_headers(flask.request.headers) consumer_id = oauth_headers.get('oauth_consumer_key') requested_project_id = flask.request.headers.get( - 'Requested-Project-Id') + 'Requested-Project-Id' + ) if not consumer_id: raise exception.ValidationError( - attribute='oauth_consumer_key', target='request') + attribute='oauth_consumer_key', target='request' + ) if not requested_project_id: raise exception.ValidationError( - attribute='Requested-Project-Id', target='request') + attribute='Requested-Project-Id', target='request' + ) # NOTE(stevemar): Ensure consumer and requested project exist PROVIDERS.resource_api.get_project(requested_project_id) @@ -160,10 +169,14 @@ class RequestTokenResource(_OAuth1ResourceBase): req_headers.update(flask.request.headers) request_verifier = oauth1.RequestTokenEndpoint( request_validator=validator.OAuthValidator(), - token_generator=oauth1.token_generator) + token_generator=oauth1.token_generator, + ) h, b, s = request_verifier.create_request_token_response( - url, http_method='POST', 
body=flask.request.args, - headers=req_headers) + url, + http_method='POST', + body=flask.request.args, + headers=req_headers, + ) if not b: msg = _('Invalid signature') raise exception.Unauthorized(message=msg) @@ -174,11 +187,13 @@ class RequestTokenResource(_OAuth1ResourceBase): consumer_id, requested_project_id, request_token_duration, - initiator=notifications.build_audit_initiator()) + initiator=notifications.build_audit_initiator(), + ) - result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s' - % {'key': token_ref['id'], - 'secret': token_ref['request_secret']}) + result = 'oauth_token=%(key)s&oauth_token_secret=%(secret)s' % { + 'key': token_ref['id'], + 'secret': token_ref['request_secret'], + } if CONF.oauth1.request_token_duration > 0: expiry_bit = '&oauth_expires_at=%s' % token_ref['expires_at'] @@ -199,35 +214,40 @@ class AccessTokenResource(_OAuth1ResourceBase): if not consumer_id: raise exception.ValidationError( - attribute='oauth_consumer_key', target='request') + attribute='oauth_consumer_key', target='request' + ) if not request_token_id: raise exception.ValidationError( - attribute='oauth_token', target='request') + attribute='oauth_token', target='request' + ) if not oauth_verifier: raise exception.ValidationError( - attribute='oauth_verifier', target='request') + attribute='oauth_verifier', target='request' + ) - req_token = PROVIDERS.oauth_api.get_request_token( - request_token_id) + req_token = PROVIDERS.oauth_api.get_request_token(request_token_id) expires_at = req_token['expires_at'] if expires_at: now = timeutils.utcnow() expires = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) + timeutils.parse_isotime(expires_at) + ) if now > expires: raise exception.Unauthorized(_('Request token is expired')) url = _update_url_scheme() access_verifier = oauth1.AccessTokenEndpoint( request_validator=validator.OAuthValidator(), - token_generator=oauth1.token_generator) + token_generator=oauth1.token_generator, + ) try: h, b, s = 
access_verifier.create_access_token_response( url, http_method='POST', body=flask.request.args, - headers=dict(flask.request.headers)) + headers=dict(flask.request.headers), + ) except NotImplementedError: # Client key or request token validation failed, since keystone # does not yet support dummy client or dummy request token, @@ -239,10 +259,14 @@ class AccessTokenResource(_OAuth1ResourceBase): LOG.warning('Provided consumer does not exist.') raise exception.Unauthorized(message=msg) if req_token['consumer_id'] != consumer_id: - msg = ('Provided consumer key does not match stored consumer ' - 'key.') - tr_msg = _('Provided consumer key does not match stored ' - 'consumer key.') + msg = ( + 'Provided consumer key does not match stored consumer ' + 'key.' + ) + tr_msg = _( + 'Provided consumer key does not match stored ' + 'consumer key.' + ) LOG.warning(msg) raise exception.Unauthorized(message=tr_msg) # The response body is empty since either one of the following reasons @@ -266,11 +290,13 @@ class AccessTokenResource(_OAuth1ResourceBase): token_ref = PROVIDERS.oauth_api.create_access_token( request_token_id, access_token_duration, - initiator=notifications.build_audit_initiator()) + initiator=notifications.build_audit_initiator(), + ) - result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s' - % {'key': token_ref['id'], - 'secret': token_ref['access_secret']}) + result = 'oauth_token=%(key)s&oauth_token_secret=%(secret)s' % { + 'key': token_ref['id'], + 'secret': token_ref['access_secret'], + } if CONF.oauth1.access_token_duration > 0: expiry_bit = '&oauth_expires_at=%s' % (token_ref['expires_at']) @@ -285,13 +311,17 @@ class AuthorizeResource(_OAuth1ResourceBase): def put(self, request_token_id): ENFORCER.enforce_call(action='identity:authorize_request_token') roles = (flask.request.get_json(force=True, silent=True) or {}).get( - 'roles', []) + 'roles', [] + ) validation.lazy_validate(schema.request_token_authorize, roles) ctx = 
flask.request.environ[context.REQUEST_CONTEXT_ENV] if ctx.is_delegated_auth: raise exception.Forbidden( - _('Cannot authorize a request token with a token issued via ' - 'delegation.')) + _( + 'Cannot authorize a request token with a token issued via ' + 'delegation.' + ) + ) req_token = PROVIDERS.oauth_api.get_request_token(request_token_id) @@ -299,7 +329,8 @@ class AuthorizeResource(_OAuth1ResourceBase): if expires_at: now = timeutils.utcnow() expires = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) + timeutils.parse_isotime(expires_at) + ) if now > expires: raise exception.Unauthorized(_('Request token is expired')) @@ -308,7 +339,8 @@ class AuthorizeResource(_OAuth1ResourceBase): # verify the authorizing user has the roles try: auth_context = flask.request.environ[ - authorization.AUTH_CONTEXT_ENV] + authorization.AUTH_CONTEXT_ENV + ] user_token_ref = auth_context['token'] except KeyError: LOG.warning("Couldn't find the auth context.") @@ -317,7 +349,8 @@ class AuthorizeResource(_OAuth1ResourceBase): user_id = user_token_ref.user_id project_id = req_token['requested_project_id'] user_roles = PROVIDERS.assignment_api.get_roles_for_user_and_project( - user_id, project_id) + user_id, project_id + ) cred_set = set(user_roles) if not cred_set.issuperset(authed_roles): @@ -329,7 +362,8 @@ class AuthorizeResource(_OAuth1ResourceBase): # finally authorize the token authed_token = PROVIDERS.oauth_api.authorize_request_token( - request_token_id, user_id, role_ids) + request_token_id, user_id, role_ids + ) to_return = {'token': {'oauth_verifier': authed_token['verifier']}} return to_return @@ -346,14 +380,14 @@ class OSAuth1API(ks_flask.APIBase): url='/request_token', resource_kwargs={}, rel='request_tokens', - resource_relation_func=_build_resource_relation + resource_relation_func=_build_resource_relation, ), ks_flask.construct_resource_map( resource=AccessTokenResource, url='/access_token', rel='access_tokens', resource_kwargs={}, - 
resource_relation_func=_build_resource_relation + resource_relation_func=_build_resource_relation, ), ks_flask.construct_resource_map( resource=AuthorizeResource, @@ -363,8 +397,11 @@ class OSAuth1API(ks_flask.APIBase): resource_relation_func=_build_resource_relation, path_vars={ 'request_token_id': _build_parameter_relation( - parameter_name='request_token_id') - })] + parameter_name='request_token_id' + ) + }, + ), + ] APIs = (OSAuth1API,) diff --git a/keystone/api/os_oauth2.py b/keystone/api/os_oauth2.py index 81f3dbd3dc..45b7cb055d 100644 --- a/keystone/api/os_oauth2.py +++ b/keystone/api/os_oauth2.py @@ -42,7 +42,8 @@ class AccessTokenResource(ks_flask.ResourceBase): raise exception.OAuth2OtherError( int(http.client.METHOD_NOT_ALLOWED), http.client.responses[http.client.METHOD_NOT_ALLOWED], - _('The method is not allowed for the requested URL.')) + _('The method is not allowed for the requested URL.'), + ) @ks_flask.unenforced_api def get(self): @@ -80,18 +81,22 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidRequest( int(http.client.BAD_REQUEST), http.client.responses[http.client.BAD_REQUEST], - _('The parameter grant_type is required.')) - LOG.info('Get OAuth2.0 Access Token API: ' - f'{error.message_format}') + _('The parameter grant_type is required.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' f'{error.message_format}' + ) raise error if grant_type != 'client_credentials': error = exception.OAuth2UnsupportedGrantType( int(http.client.BAD_REQUEST), http.client.responses[http.client.BAD_REQUEST], - _('The parameter grant_type %s is not supported.' 
- ) % grant_type) - LOG.info('Get OAuth2.0 Access Token API: ' - f'{error.message_format}') + _('The parameter grant_type %s is not supported.') + % grant_type, + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' f'{error.message_format}' + ) raise error auth_method = '' @@ -107,9 +112,12 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: ' - 'failed to get a client_id from the request.') + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'failed to get a client_id from the request.' + ) raise error if client_cert: auth_method = 'tls_client_auth' @@ -125,9 +133,12 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: ' - 'failed to get client credentials from the request.') + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'failed to get client credentials from the request.' 
+ ) raise error def _client_secret_basic(self, client_id, client_secret): @@ -137,8 +148,8 @@ class AccessTokenResource(ks_flask.ResourceBase): 'methods': ['application_credential'], 'application_credential': { 'id': client_id, - 'secret': client_secret - } + 'secret': client_secret, + }, } } try: @@ -146,32 +157,37 @@ class AccessTokenResource(ks_flask.ResourceBase): except exception.Error as error: if error.code == 401: error = exception.OAuth2InvalidClient( - error.code, error.title, - str(error)) + error.code, error.title, str(error) + ) elif error.code == 400: error = exception.OAuth2InvalidRequest( - error.code, error.title, - str(error)) + error.code, error.title, str(error) + ) else: error = exception.OAuth2OtherError( - error.code, error.title, + error.code, + error.title, 'An unknown error occurred and failed to get an OAuth2.0 ' - 'access token.') + 'access token.', + ) LOG.exception(error) raise error except Exception as error: error = exception.OAuth2OtherError( int(http.client.INTERNAL_SERVER_ERROR), http.client.responses[http.client.INTERNAL_SERVER_ERROR], - str(error)) + str(error), + ) LOG.exception(error) raise error - resp = make_response({ - 'access_token': token.id, - 'token_type': 'Bearer', - 'expires_in': CONF.token.expiration - }) + resp = make_response( + { + 'access_token': token.id, + 'token_type': 'Bearer', + 'expires_in': CONF.token.expiration, + } + ) resp.status = '200 OK' return resp @@ -183,14 +199,18 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: ' - 'mapping id %s is not found. ', - mapping_id) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'mapping id %s is not found. 
', + mapping_id, + ) raise error rule_processor = federation_utils.RuleProcessor( - mapping.get('id'), mapping.get('rules')) + mapping.get('id'), mapping.get('rules') + ) try: mapped_properties = rule_processor.process(cert_dn) except exception.Error as error: @@ -198,24 +218,32 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: ' - 'mapping rule process failed. ' - 'mapping_id: %s, rules: %s, data: %s.', - mapping_id, mapping.get('rules'), - jsonutils.dumps(cert_dn)) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'mapping rule process failed. ' + 'mapping_id: %s, rules: %s, data: %s.', + mapping_id, + mapping.get('rules'), + jsonutils.dumps(cert_dn), + ) raise error except Exception as error: LOG.exception(error) error = exception.OAuth2OtherError( int(http.client.INTERNAL_SERVER_ERROR), http.client.responses[http.client.INTERNAL_SERVER_ERROR], - str(error)) - LOG.info('Get OAuth2.0 Access Token API: ' - 'mapping rule process failed. ' - 'mapping_id: %s, rules: %s, data: %s.', - mapping_id, mapping.get('rules'), - jsonutils.dumps(cert_dn)) + str(error), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'mapping rule process failed. ' + 'mapping_id: %s, rules: %s, data: %s.', + mapping_id, + mapping.get('rules'), + jsonutils.dumps(cert_dn), + ) raise error mapping_user = mapped_properties.get('user', {}) @@ -229,50 +257,77 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: %s check failed. 
' - 'DN value: %s, DB value: %s.', - 'user name', mapping_user_name, user.get('name')) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: %s check failed. ' + 'DN value: %s, DB value: %s.', + 'user name', + mapping_user_name, + user.get('name'), + ) raise error if mapping_user_id and mapping_user_id != user.get('id'): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: %s check failed. ' - 'DN value: %s, DB value: %s.', - 'user id', mapping_user_id, user.get('id')) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: %s check failed. ' + 'DN value: %s, DB value: %s.', + 'user id', + mapping_user_id, + user.get('id'), + ) raise error if mapping_user_email and mapping_user_email != user.get('email'): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: %s check failed. ' - 'DN value: %s, DB value: %s.', - 'user email', mapping_user_email, user.get('email')) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: %s check failed. ' + 'DN value: %s, DB value: %s.', + 'user email', + mapping_user_email, + user.get('email'), + ) raise error - if (mapping_user_domain_id and - mapping_user_domain_id != user_domain.get('id')): + if ( + mapping_user_domain_id + and mapping_user_domain_id != user_domain.get('id') + ): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: %s check failed. 
' - 'DN value: %s, DB value: %s.', - 'user domain id', mapping_user_domain_id, - user_domain.get('id')) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: %s check failed. ' + 'DN value: %s, DB value: %s.', + 'user domain id', + mapping_user_domain_id, + user_domain.get('id'), + ) raise error - if (mapping_user_domain_name and - mapping_user_domain_name != user_domain.get('name')): + if ( + mapping_user_domain_name + and mapping_user_domain_name != user_domain.get('name') + ): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: %s check failed. ' - 'DN value: %s, DB value: %s.', - 'user domain name', mapping_user_domain_name, - user_domain.get('name')) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: %s check failed. ' + 'DN value: %s, DB value: %s.', + 'user domain name', + mapping_user_domain_name, + user_domain.get('name'), + ) raise error def _tls_client_auth(self, client_id, client_cert): @@ -283,9 +338,12 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: ' - 'failed to get the subject DN from the certificate.') + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'failed to get the subject DN from the certificate.' 
+ ) raise error try: cert_issuer_dn = utils.get_certificate_issuer_dn(client_cert) @@ -293,17 +351,22 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: ' - 'failed to get the issuer DN from the certificate.') + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'failed to get the issuer DN from the certificate.' + ) raise error client_cert_dn = {} for key in cert_subject_dn: - client_cert_dn['SSL_CLIENT_SUBJECT_DN_%s' % - key.upper()] = cert_subject_dn.get(key) + client_cert_dn['SSL_CLIENT_SUBJECT_DN_%s' % key.upper()] = ( + cert_subject_dn.get(key) + ) for key in cert_issuer_dn: - client_cert_dn['SSL_CLIENT_ISSUER_DN_%s' % - key.upper()] = cert_issuer_dn.get(key) + client_cert_dn['SSL_CLIENT_ISSUER_DN_%s' % key.upper()] = ( + cert_issuer_dn.get(key) + ) try: user = PROVIDERS.identity_api.get_user(client_id) @@ -311,24 +374,29 @@ class AccessTokenResource(ks_flask.ResourceBase): error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: ' - 'the user does not exist. user id: %s.', - client_id) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'the user does not exist. user id: %s.', + client_id, + ) raise error project_id = user.get('default_project_id') if not project_id: error = exception.OAuth2InvalidClient( int(http.client.UNAUTHORIZED), http.client.responses[http.client.UNAUTHORIZED], - _('Client authentication failed.')) - LOG.info('Get OAuth2.0 Access Token API: ' - 'the user does not have default project. 
user id: %s.', - client_id) + _('Client authentication failed.'), + ) + LOG.info( + 'Get OAuth2.0 Access Token API: ' + 'the user does not have default project. user id: %s.', + client_id, + ) raise error - user_domain = PROVIDERS.resource_api.get_domain( - user.get('domain_id')) + user_domain = PROVIDERS.resource_api.get_domain(user.get('domain_id')) self._check_mapped_properties(client_cert_dn, user, user_domain) thumbprint = utils.get_certificate_thumbprint(client_cert) LOG.debug(f'The mTLS certificate thumbprint: {thumbprint}') @@ -337,37 +405,42 @@ class AccessTokenResource(ks_flask.ResourceBase): user_id=client_id, method_names=['oauth2_credential'], project_id=project_id, - thumbprint=thumbprint + thumbprint=thumbprint, ) except exception.Error as error: if error.code == 401: error = exception.OAuth2InvalidClient( - error.code, error.title, - str(error)) + error.code, error.title, str(error) + ) elif error.code == 400: error = exception.OAuth2InvalidRequest( - error.code, error.title, - str(error)) + error.code, error.title, str(error) + ) else: error = exception.OAuth2OtherError( - error.code, error.title, + error.code, + error.title, 'An unknown error occurred and failed to get an OAuth2.0 ' - 'access token.') + 'access token.', + ) LOG.exception(error) raise error except Exception as error: error = exception.OAuth2OtherError( int(http.client.INTERNAL_SERVER_ERROR), http.client.responses[http.client.INTERNAL_SERVER_ERROR], - str(error)) + str(error), + ) LOG.exception(error) raise error - resp = make_response({ - 'access_token': token.id, - 'token_type': 'Bearer', - 'expires_in': CONF.token.expiration - }) + resp = make_response( + { + 'access_token': token.id, + 'token_type': 'Bearer', + 'expires_in': CONF.token.expiration, + } + ) resp.status = '200 OK' return resp @@ -383,8 +456,9 @@ class OSAuth2API(ks_flask.APIBase): url='/token', rel='token', resource_kwargs={}, - resource_relation_func=_build_resource_relation - )] + 
resource_relation_func=_build_resource_relation, + ) + ] APIs = (OSAuth2API,) diff --git a/keystone/api/os_revoke.py b/keystone/api/os_revoke.py index f5bf411a69..681171097e 100644 --- a/keystone/api/os_revoke.py +++ b/keystone/api/os_revoke.py @@ -39,10 +39,12 @@ class OSRevokeResource(flask_restful.Resource): if since: try: last_fetch = timeutils.normalize_time( - timeutils.parse_isotime(since)) + timeutils.parse_isotime(since) + ) except ValueError: raise exception.ValidationError( - message=_('invalidate date format %s') % since) + message=_('invalidate date format %s') % since + ) # FIXME(notmorgan): The revocation events cannot have resource options # added to them or lazy-loaded relationships as long as to_dict # is called outside of an active session context. This API is unused @@ -51,12 +53,14 @@ class OSRevokeResource(flask_restful.Resource): # events themselves. events = PROVIDERS.revoke_api.list_events(last_fetch=last_fetch) # Build the links by hand as the standard controller calls require ids - response = {'events': [event.to_dict() for event in events], - 'links': { - 'next': None, - 'self': ks_flask.base_url(path='/OS-REVOKE/events'), - 'previous': None} - } + response = { + 'events': [event.to_dict() for event in events], + 'links': { + 'next': None, + 'self': ks_flask.base_url(path='/OS-REVOKE/events'), + 'previous': None, + }, + } return response @@ -71,7 +75,7 @@ class OSRevokeAPI(ks_flask.APIBase): url='/events', resource_kwargs={}, rel='events', - resource_relation_func=_build_resource_relation + resource_relation_func=_build_resource_relation, ) ] diff --git a/keystone/api/os_simple_cert.py b/keystone/api/os_simple_cert.py index 7250c6c76a..b5bea8494b 100644 --- a/keystone/api/os_simple_cert.py +++ b/keystone/api/os_simple_cert.py @@ -31,16 +31,22 @@ class SimpleCertCAResource(flask_restful.Resource): @ks_flask.unenforced_api def get(self): raise exception.Gone( - message=_('This API is no longer available due to the removal ' - 'of support 
for PKI tokens.')) + message=_( + 'This API is no longer available due to the removal ' + 'of support for PKI tokens.' + ) + ) class SimpleCertListResource(flask_restful.Resource): @ks_flask.unenforced_api def get(self): raise exception.Gone( - message=_('This API is no longer available due to the removal ' - 'of support for PKI tokens.')) + message=_( + 'This API is no longer available due to the removal ' + 'of support for PKI tokens.' + ) + ) class SimpleCertAPI(ks_flask.APIBase): @@ -53,13 +59,15 @@ class SimpleCertAPI(ks_flask.APIBase): url='/OS-SIMPLE-CERT/ca', resource_kwargs={}, rel='ca_certificate', - resource_relation_func=_build_resource_relation), + resource_relation_func=_build_resource_relation, + ), ks_flask.construct_resource_map( resource=SimpleCertListResource, url='/OS-SIMPLE-CERT/certificates', resource_kwargs={}, rel='certificates', - resource_relation_func=_build_resource_relation), + resource_relation_func=_build_resource_relation, + ), ] diff --git a/keystone/api/policy.py b/keystone/api/policy.py index 6a51f9c58d..e68deefe03 100644 --- a/keystone/api/policy.py +++ b/keystone/api/policy.py @@ -41,7 +41,7 @@ class PolicyResource(ks_flask.ResourceBase): @versionutils.deprecated( as_of=versionutils.deprecated.QUEENS, - what='identity:get_policy of the v3 Policy APIs' + what='identity:get_policy of the v3 Policy APIs', ) def _get_policy(self, policy_id): ENFORCER.enforce_call(action='identity:get_policy') @@ -50,7 +50,7 @@ class PolicyResource(ks_flask.ResourceBase): @versionutils.deprecated( as_of=versionutils.deprecated.QUEENS, - what='identity:list_policies of the v3 Policy APIs' + what='identity:list_policies of the v3 Policy APIs', ) def _list_policies(self): ENFORCER.enforce_call(action='identity:list_policies') @@ -61,7 +61,7 @@ class PolicyResource(ks_flask.ResourceBase): @versionutils.deprecated( as_of=versionutils.deprecated.QUEENS, - what='identity:create_policy of the v3 Policy APIs' + what='identity:create_policy of the v3 Policy 
APIs', ) def post(self): ENFORCER.enforce_call(action='identity:create_policy') @@ -76,7 +76,7 @@ class PolicyResource(ks_flask.ResourceBase): @versionutils.deprecated( as_of=versionutils.deprecated.QUEENS, - what='identity:update_policy of the v3 Policy APIs' + what='identity:update_policy of the v3 Policy APIs', ) def patch(self, policy_id): ENFORCER.enforce_call(action='identity:update_policy') @@ -90,7 +90,7 @@ class PolicyResource(ks_flask.ResourceBase): @versionutils.deprecated( as_of=versionutils.deprecated.QUEENS, - what='identity:delete_policy of the v3 Policy APIs' + what='identity:delete_policy of the v3 Policy APIs', ) def delete(self, policy_id): ENFORCER.enforce_call(action='identity:delete_policy') @@ -231,45 +231,51 @@ class PolicyAPI(ks_flask.APIBase): resource_kwargs={}, rel='policy_endpoints', path_vars={'policy_id': json_home.Parameters.POLICY_ID}, - resource_relation_func=_resource_rel_func + resource_relation_func=_resource_rel_func, ), ks_flask.construct_resource_map( resource=EndpointPolicyAssociations, - url=('/policies//OS-ENDPOINT-POLICY/' - 'endpoints/'), + url=( + '/policies//OS-ENDPOINT-POLICY/' + 'endpoints/' + ), resource_kwargs={}, rel='endpoint_policy_association', path_vars={ 'policy_id': json_home.Parameters.POLICY_ID, - 'endpoint_id': json_home.Parameters.ENDPOINT_ID + 'endpoint_id': json_home.Parameters.ENDPOINT_ID, }, - resource_relation_func=_resource_rel_func + resource_relation_func=_resource_rel_func, ), ks_flask.construct_resource_map( resource=ServicePolicyAssociations, - url=('/policies//OS-ENDPOINT-POLICY/' - 'services/'), + url=( + '/policies//OS-ENDPOINT-POLICY/' + 'services/' + ), resource_kwargs={}, rel='service_policy_association', path_vars={ 'policy_id': json_home.Parameters.POLICY_ID, - 'service_id': json_home.Parameters.SERVICE_ID + 'service_id': json_home.Parameters.SERVICE_ID, }, - resource_relation_func=_resource_rel_func + resource_relation_func=_resource_rel_func, ), ks_flask.construct_resource_map( 
resource=ServiceRegionPolicyAssociations, - url=('/policies//OS-ENDPOINT-POLICY/' - 'services//regions/'), + url=( + '/policies//OS-ENDPOINT-POLICY/' + 'services//regions/' + ), resource_kwargs={}, rel='region_and_service_policy_association', path_vars={ 'policy_id': json_home.Parameters.POLICY_ID, 'service_id': json_home.Parameters.SERVICE_ID, - 'region_id': json_home.Parameters.REGION_ID + 'region_id': json_home.Parameters.REGION_ID, }, - resource_relation_func=_resource_rel_func - ) + resource_relation_func=_resource_rel_func, + ), ] diff --git a/keystone/api/projects.py b/keystone/api/projects.py index 27725d6b6c..99576c9acc 100644 --- a/keystone/api/projects.py +++ b/keystone/api/projects.py @@ -50,7 +50,8 @@ class ProjectResource(ks_flask.ResourceBase): collection_key = 'projects' member_key = 'project' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='resource_api', method='get_project') + api='resource_api', method='get_project' + ) def _expand_project_ref(self, ref): parents_as_list = self.query_filter_is_true('parents_as_list') @@ -63,21 +64,25 @@ class ProjectResource(ks_flask.ResourceBase): # parents_as_list and parents_as_ids are mutually exclusive if parents_as_list and parents_as_ids: - msg = _('Cannot use parents_as_list and parents_as_ids query ' - 'params at the same time.') + msg = _( + 'Cannot use parents_as_list and parents_as_ids query ' + 'params at the same time.' + ) raise exception.ValidationError(msg) # subtree_as_list and subtree_as_ids are mutually exclusive if subtree_as_list and subtree_as_ids: - msg = _('Cannot use subtree_as_list and subtree_as_ids query ' - 'params at the same time.') + msg = _( + 'Cannot use subtree_as_list and subtree_as_ids query ' + 'params at the same time.' 
+ ) raise exception.ValidationError(msg) if parents_as_list: parents = PROVIDERS.resource_api.list_project_parents( - ref['id'], self.oslo_context.user_id, include_limits) - ref['parents'] = [self.wrap_member(p) - for p in parents] + ref['id'], self.oslo_context.user_id, include_limits + ) + ref['parents'] = [self.wrap_member(p) for p in parents] elif parents_as_ids: ref['parents'] = PROVIDERS.resource_api.get_project_parents_as_ids( ref @@ -85,9 +90,9 @@ class ProjectResource(ks_flask.ResourceBase): if subtree_as_list: subtree = PROVIDERS.resource_api.list_projects_in_subtree( - ref['id'], self.oslo_context.user_id, include_limits) - ref['subtree'] = [self.wrap_member(p) - for p in subtree] + ref['id'], self.oslo_context.user_id, include_limits + ) + ref['subtree'] = [self.wrap_member(p) for p in subtree] elif subtree_as_ids: ref['subtree'] = ( PROVIDERS.resource_api.get_projects_in_subtree_as_ids( @@ -102,7 +107,7 @@ class ProjectResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:get_project', - build_target=_build_project_target_enforcement + build_target=_build_project_target_enforcement, ) project = PROVIDERS.resource_api.get_project(project_id) self._expand_project_ref(project) @@ -117,9 +122,11 @@ class ProjectResource(ks_flask.ResourceBase): target = None if self.oslo_context.domain_id: target = {'domain_id': self.oslo_context.domain_id} - ENFORCER.enforce_call(action='identity:list_projects', - filters=filters, - target_attr=target) + ENFORCER.enforce_call( + action='identity:list_projects', + filters=filters, + target_attr=target, + ) hints = self.build_driver_hints(filters) # If 'is_domain' has not been included as a query, we default it to @@ -174,9 +181,8 @@ class ProjectResource(ks_flask.ResourceBase): project = self._normalize_dict(project) try: ref = PROVIDERS.resource_api.create_project( - project['id'], - project, - initiator=self.audit_initiator) + project['id'], project, initiator=self.audit_initiator + ) except 
(exception.DomainNotFound, exception.ProjectNotFound) as e: raise exception.ValidationError(e) return self.wrap_member(ref), http.client.CREATED @@ -188,15 +194,14 @@ class ProjectResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:update_project', - build_target=_build_project_target_enforcement + build_target=_build_project_target_enforcement, ) project = self.request_body_json.get('project', {}) validation.lazy_validate(schema.project_update, project) self._require_matching_id(project) ref = PROVIDERS.resource_api.update_project( - project_id, - project, - initiator=self.audit_initiator) + project_id, project, initiator=self.audit_initiator + ) return self.wrap_member(ref) def delete(self, project_id): @@ -206,11 +211,11 @@ class ProjectResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:delete_project', - build_target=_build_project_target_enforcement + build_target=_build_project_target_enforcement, ) PROVIDERS.resource_api.delete_project( - project_id, - initiator=self.audit_initiator) + project_id, initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT @@ -218,7 +223,8 @@ class _ProjectTagResourceBase(ks_flask.ResourceBase): collection_key = 'projects' member_key = 'tags' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='resource_api', method='get_project_tag') + api='resource_api', method='get_project_tag' + ) @classmethod def wrap_member(cls, ref, collection_name=None, member_name=None): @@ -226,7 +232,7 @@ class _ProjectTagResourceBase(ks_flask.ResourceBase): # NOTE(gagehugo): Overriding this due to how the common controller # expects the ref to have an id, which for tags it does not. 
new_ref = {'links': {'self': ks_flask.full_url()}} - new_ref[member_name] = (ref or []) + new_ref[member_name] = ref or [] return new_ref @@ -238,7 +244,7 @@ class ProjectTagsResource(_ProjectTagResourceBase): """ ENFORCER.enforce_call( action='identity:list_project_tags', - build_target=_build_project_target_enforcement + build_target=_build_project_target_enforcement, ) ref = PROVIDERS.resource_api.list_project_tags(project_id) return self.wrap_member(ref) @@ -250,12 +256,13 @@ class ProjectTagsResource(_ProjectTagResourceBase): """ ENFORCER.enforce_call( action='identity:update_project_tags', - build_target=_build_project_target_enforcement + build_target=_build_project_target_enforcement, ) tags = self.request_body_json.get('tags', {}) validation.lazy_validate(schema.project_tags_update, tags) ref = PROVIDERS.resource_api.update_project_tags( - project_id, tags, initiator=self.audit_initiator) + project_id, tags, initiator=self.audit_initiator + ) return self.wrap_member(ref) def delete(self, project_id): @@ -265,7 +272,7 @@ class ProjectTagsResource(_ProjectTagResourceBase): """ ENFORCER.enforce_call( action='identity:delete_project_tags', - build_target=_build_project_target_enforcement + build_target=_build_project_target_enforcement, ) PROVIDERS.resource_api.update_project_tags(project_id, []) return None, http.client.NO_CONTENT @@ -291,7 +298,7 @@ class ProjectTagResource(_ProjectTagResourceBase): """ ENFORCER.enforce_call( action='identity:create_project_tag', - build_target=_build_project_target_enforcement + build_target=_build_project_target_enforcement, ) validation.lazy_validate(schema.project_tag_create, value) # Check if we will exceed the max number of tags on this project @@ -299,9 +306,7 @@ class ProjectTagResource(_ProjectTagResourceBase): tags.append(value) validation.lazy_validate(schema.project_tags_update, tags) PROVIDERS.resource_api.create_project_tag( - project_id, - value, - initiator=self.audit_initiator + project_id, value, 
initiator=self.audit_initiator ) url = '/'.join((ks_flask.base_url(), project_id, 'tags', value)) response = flask.make_response('', http.client.CREATED) @@ -315,7 +320,7 @@ class ProjectTagResource(_ProjectTagResourceBase): """ ENFORCER.enforce_call( action='identity:delete_project_tag', - build_target=_build_project_target_enforcement + build_target=_build_project_target_enforcement, ) PROVIDERS.resource_api.delete_project_tag(project_id, value) return None, http.client.NO_CONTENT @@ -325,17 +330,22 @@ class _ProjectGrantResourceBase(ks_flask.ResourceBase): collection_key = 'roles' member_key = 'role' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='role_api', method='get_role') + api='role_api', method='get_role' + ) @staticmethod def _check_if_inherited(): return flask.request.path.endswith('/inherited_to_projects') @staticmethod - def _build_enforcement_target_attr(role_id=None, user_id=None, - group_id=None, domain_id=None, - project_id=None, - allow_non_existing=False): + def _build_enforcement_target_attr( + role_id=None, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + allow_non_existing=False, + ): ref = {} if role_id: ref['role'] = PROVIDERS.role_api.get_role(role_id) @@ -368,13 +378,19 @@ class ProjectUserGrantResource(_ProjectGrantResourceBase): ENFORCER.enforce_call( action='identity:check_grant', build_target=functools.partial( - self._build_enforcement_target_attr, role_id=role_id, - project_id=project_id, user_id=user_id) + self._build_enforcement_target_attr, + role_id=role_id, + project_id=project_id, + user_id=user_id, + ), ) inherited = self._check_if_inherited() PROVIDERS.assignment_api.get_grant( - role_id=role_id, user_id=user_id, project_id=project_id, - inherited_to_projects=inherited) + role_id=role_id, + user_id=user_id, + project_id=project_id, + inherited_to_projects=inherited, + ) return None, http.client.NO_CONTENT def put(self, project_id, user_id, role_id): @@ -386,12 +402,19 @@ class 
ProjectUserGrantResource(_ProjectGrantResourceBase): action='identity:create_grant', build_target=functools.partial( self._build_enforcement_target_attr, - role_id=role_id, project_id=project_id, user_id=user_id) + role_id=role_id, + project_id=project_id, + user_id=user_id, + ), ) inherited = self._check_if_inherited() PROVIDERS.assignment_api.create_grant( - role_id=role_id, user_id=user_id, project_id=project_id, - inherited_to_projects=inherited, initiator=self.audit_initiator) + role_id=role_id, + user_id=user_id, + project_id=project_id, + inherited_to_projects=inherited, + initiator=self.audit_initiator, + ) return None, http.client.NO_CONTENT def delete(self, project_id, user_id, role_id): @@ -403,13 +426,20 @@ class ProjectUserGrantResource(_ProjectGrantResourceBase): action='identity:revoke_grant', build_target=functools.partial( self._build_enforcement_target_attr, - role_id=role_id, user_id=user_id, project_id=project_id, - allow_non_existing=True) + role_id=role_id, + user_id=user_id, + project_id=project_id, + allow_non_existing=True, + ), ) inherited = self._check_if_inherited() PROVIDERS.assignment_api.delete_grant( - role_id=role_id, user_id=user_id, project_id=project_id, - inherited_to_projects=inherited, initiator=self.audit_initiator) + role_id=role_id, + user_id=user_id, + project_id=project_id, + inherited_to_projects=inherited, + initiator=self.audit_initiator, + ) return None, http.client.NO_CONTENT @@ -423,12 +453,16 @@ class ProjectUserListGrantResource(_ProjectGrantResourceBase): action='identity:list_grants', build_target=functools.partial( self._build_enforcement_target_attr, - project_id=project_id, user_id=user_id) + project_id=project_id, + user_id=user_id, + ), ) inherited = self._check_if_inherited() refs = PROVIDERS.assignment_api.list_grants( - user_id=user_id, project_id=project_id, - inherited_to_projects=inherited) + user_id=user_id, + project_id=project_id, + inherited_to_projects=inherited, + ) return 
self.wrap_collection(refs) @@ -441,13 +475,19 @@ class ProjectGroupGrantResource(_ProjectGrantResourceBase): ENFORCER.enforce_call( action='identity:check_grant', build_target=functools.partial( - self._build_enforcement_target_attr, role_id=role_id, - project_id=project_id, group_id=group_id) + self._build_enforcement_target_attr, + role_id=role_id, + project_id=project_id, + group_id=group_id, + ), ) inherited = self._check_if_inherited() PROVIDERS.assignment_api.get_grant( - role_id=role_id, group_id=group_id, project_id=project_id, - inherited_to_projects=inherited) + role_id=role_id, + group_id=group_id, + project_id=project_id, + inherited_to_projects=inherited, + ) return None, http.client.NO_CONTENT def put(self, project_id, group_id, role_id): @@ -459,12 +499,19 @@ class ProjectGroupGrantResource(_ProjectGrantResourceBase): action='identity:create_grant', build_target=functools.partial( self._build_enforcement_target_attr, - role_id=role_id, project_id=project_id, group_id=group_id) + role_id=role_id, + project_id=project_id, + group_id=group_id, + ), ) inherited = self._check_if_inherited() PROVIDERS.assignment_api.create_grant( - role_id=role_id, group_id=group_id, project_id=project_id, - inherited_to_projects=inherited, initiator=self.audit_initiator) + role_id=role_id, + group_id=group_id, + project_id=project_id, + inherited_to_projects=inherited, + initiator=self.audit_initiator, + ) return None, http.client.NO_CONTENT def delete(self, project_id, group_id, role_id): @@ -476,13 +523,20 @@ class ProjectGroupGrantResource(_ProjectGrantResourceBase): action='identity:revoke_grant', build_target=functools.partial( self._build_enforcement_target_attr, - role_id=role_id, group_id=group_id, project_id=project_id, - allow_non_existing=True) + role_id=role_id, + group_id=group_id, + project_id=project_id, + allow_non_existing=True, + ), ) inherited = self._check_if_inherited() PROVIDERS.assignment_api.delete_grant( - role_id=role_id, group_id=group_id, 
project_id=project_id, - inherited_to_projects=inherited, initiator=self.audit_initiator) + role_id=role_id, + group_id=group_id, + project_id=project_id, + inherited_to_projects=inherited, + initiator=self.audit_initiator, + ) return None, http.client.NO_CONTENT @@ -496,12 +550,16 @@ class ProjectGroupListGrantResource(_ProjectGrantResourceBase): action='identity:list_grants', build_target=functools.partial( self._build_enforcement_target_attr, - project_id=project_id, group_id=group_id) + project_id=project_id, + group_id=group_id, + ), ) inherited = self._check_if_inherited() refs = PROVIDERS.assignment_api.list_grants( - group_id=group_id, project_id=project_id, - inherited_to_projects=inherited) + group_id=group_id, + project_id=project_id, + inherited_to_projects=inherited, + ) return self.wrap_collection(refs) @@ -515,8 +573,7 @@ class ProjectAPI(ks_flask.APIBase): url='/projects//tags', resource_kwargs={}, rel='project_tags', - path_vars={ - 'project_id': json_home.Parameters.PROJECT_ID} + path_vars={'project_id': json_home.Parameters.PROJECT_ID}, ), ks_flask.construct_resource_map( resource=ProjectTagResource, @@ -525,18 +582,21 @@ class ProjectAPI(ks_flask.APIBase): rel='project_tags', path_vars={ 'project_id': json_home.Parameters.PROJECT_ID, - 'value': json_home.Parameters.TAG_VALUE} + 'value': json_home.Parameters.TAG_VALUE, + }, ), ks_flask.construct_resource_map( resource=ProjectUserGrantResource, - url=('/projects//users//' - 'roles/'), + url=( + '/projects//users//' + 'roles/' + ), resource_kwargs={}, rel='project_user_role', path_vars={ 'project_id': json_home.Parameters.PROJECT_ID, 'user_id': json_home.Parameters.USER_ID, - 'role_id': json_home.Parameters.ROLE_ID + 'role_id': json_home.Parameters.ROLE_ID, }, ), ks_flask.construct_resource_map( @@ -546,19 +606,21 @@ class ProjectAPI(ks_flask.APIBase): rel='project_user_roles', path_vars={ 'project_id': json_home.Parameters.PROJECT_ID, - 'user_id': json_home.Parameters.USER_ID - } + 'user_id': 
json_home.Parameters.USER_ID, + }, ), ks_flask.construct_resource_map( resource=ProjectGroupGrantResource, - url=('/projects//groups//' - 'roles/'), + url=( + '/projects//groups//' + 'roles/' + ), resource_kwargs={}, rel='project_group_role', path_vars={ 'project_id': json_home.Parameters.PROJECT_ID, 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID + 'role_id': json_home.Parameters.ROLE_ID, }, ), ks_flask.construct_resource_map( @@ -568,7 +630,7 @@ class ProjectAPI(ks_flask.APIBase): rel='project_group_roles', path_vars={ 'project_id': json_home.Parameters.PROJECT_ID, - 'group_id': json_home.Parameters.GROUP_ID + 'group_id': json_home.Parameters.GROUP_ID, }, ), ] diff --git a/keystone/api/regions.py b/keystone/api/regions.py index 37f17e827f..7e162dd012 100644 --- a/keystone/api/regions.py +++ b/keystone/api/regions.py @@ -58,7 +58,8 @@ class RegionResource(ks_flask.ResourceBase): # both ways. region = self._assign_unique_id(region) ref = PROVIDERS.catalog_api.create_region( - region, initiator=self.audit_initiator) + region, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def put(self, region_id): @@ -70,13 +71,16 @@ class RegionResource(ks_flask.ResourceBase): region['id'] = region_id elif region_id != region.get('id'): raise exception.ValidationError( - _('Conflicting region IDs specified: ' - '"%(url_id)s" != "%(ref_id)s"') % { - 'url_id': region_id, - 'ref_id': region['id']}) + _( + 'Conflicting region IDs specified: ' + '"%(url_id)s" != "%(ref_id)s"' + ) + % {'url_id': region_id, 'ref_id': region['id']} + ) ref = PROVIDERS.catalog_api.create_region( - region, initiator=self.audit_initiator) + region, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def patch(self, region_id): @@ -84,13 +88,20 @@ class RegionResource(ks_flask.ResourceBase): region = self.request_body_json.get('region') validation.lazy_validate(schema.region_update, region) 
self._require_matching_id(region) - return self.wrap_member(PROVIDERS.catalog_api.update_region( - region_id, region, initiator=self.audit_initiator)) + return self.wrap_member( + PROVIDERS.catalog_api.update_region( + region_id, region, initiator=self.audit_initiator + ) + ) def delete(self, region_id): ENFORCER.enforce_call(action='identity:delete_region') - return PROVIDERS.catalog_api.delete_region( - region_id, initiator=self.audit_initiator), http.client.NO_CONTENT + return ( + PROVIDERS.catalog_api.delete_region( + region_id, initiator=self.audit_initiator + ), + http.client.NO_CONTENT, + ) class RegionAPI(ks_flask.APIBase): diff --git a/keystone/api/registered_limits.py b/keystone/api/registered_limits.py index 1d66f3be7a..b3ad8c78c0 100644 --- a/keystone/api/registered_limits.py +++ b/keystone/api/registered_limits.py @@ -35,13 +35,15 @@ class RegisteredLimitResource(ks_flask.ResourceBase): def _get_registered_limit(self, registered_limit_id): ENFORCER.enforce_call(action='identity:get_registered_limit') ref = PROVIDERS.unified_limit_api.get_registered_limit( - registered_limit_id) + registered_limit_id + ) return self.wrap_member(ref) def _list_registered_limits(self): filters = ['service_id', 'region_id', 'resource_name'] - ENFORCER.enforce_call(action='identity:list_registered_limits', - filters=filters) + ENFORCER.enforce_call( + action='identity:list_registered_limits', filters=filters + ) hints = self.build_driver_hints(filters) refs = PROVIDERS.unified_limit_api.list_registered_limits(hints) return self.wrap_collection(refs, hints=hints) @@ -53,32 +55,42 @@ class RegisteredLimitResource(ks_flask.ResourceBase): def post(self): ENFORCER.enforce_call(action='identity:create_registered_limits') - reg_limits = (flask.request.get_json( - silent=True, force=True) or {}).get('registered_limits', {}) + reg_limits = ( + flask.request.get_json(silent=True, force=True) or {} + ).get('registered_limits', {}) 
validation.lazy_validate(schema.registered_limit_create, reg_limits) - registered_limits = [self._assign_unique_id(self._normalize_dict(r)) - for r in reg_limits] + registered_limits = [ + self._assign_unique_id(self._normalize_dict(r)) for r in reg_limits + ] refs = PROVIDERS.unified_limit_api.create_registered_limits( - registered_limits) + registered_limits + ) refs = self.wrap_collection(refs) refs.pop('links') return refs, http.client.CREATED def patch(self, registered_limit_id): ENFORCER.enforce_call(action='identity:update_registered_limit') - registered_limit = (flask.request.get_json( - silent=True, force=True) or {}).get('registered_limit', {}) - validation.lazy_validate(schema.registered_limit_update, - registered_limit) + registered_limit = ( + flask.request.get_json(silent=True, force=True) or {} + ).get('registered_limit', {}) + validation.lazy_validate( + schema.registered_limit_update, registered_limit + ) self._require_matching_id(registered_limit) ref = PROVIDERS.unified_limit_api.update_registered_limit( - registered_limit_id, registered_limit) + registered_limit_id, registered_limit + ) return self.wrap_member(ref) def delete(self, registered_limit_id): ENFORCER.enforce_call(action='identity:delete_registered_limit') - return (PROVIDERS.unified_limit_api.delete_registered_limit( - registered_limit_id), http.client.NO_CONTENT) + return ( + PROVIDERS.unified_limit_api.delete_registered_limit( + registered_limit_id + ), + http.client.NO_CONTENT, + ) class RegisteredLimitsAPI(ks_flask.APIBase): diff --git a/keystone/api/role_assignments.py b/keystone/api/role_assignments.py index df2fcc21e6..b90ca0f2e1 100644 --- a/keystone/api/role_assignments.py +++ b/keystone/api/role_assignments.py @@ -46,8 +46,13 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): def _list_role_assignments(self): filters = [ - 'group.id', 'role.id', 'scope.domain.id', 'scope.project.id', - 'scope.OS-INHERIT:inherited_to', 'user.id', 'scope.system' + 'group.id', + 
'role.id', + 'scope.domain.id', + 'scope.project.id', + 'scope.OS-INHERIT:inherited_to', + 'user.id', + 'scope.system', ] target = None if self.oslo_context.domain_id: @@ -58,9 +63,11 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): # so we reflect the domain_id from the context into the target # to validate domain-scoped tokens. target = {'domain_id': self.oslo_context.domain_id} - ENFORCER.enforce_call(action='identity:list_role_assignments', - filters=filters, - target_attr=target) + ENFORCER.enforce_call( + action='identity:list_role_assignments', + filters=filters, + target_attr=target, + ) assignments = self._build_role_assignments_list() @@ -83,8 +90,12 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): def _list_role_assignments_for_tree(self): filters = [ - 'group.id', 'role.id', 'scope.domain.id', 'scope.project.id', - 'scope.OS-INHERIT:inherited_to', 'user.id' + 'group.id', + 'role.id', + 'scope.domain.id', + 'scope.project.id', + 'scope.OS-INHERIT:inherited_to', + 'user.id', ] project_id = flask.request.args.get('scope.project.id') target = None @@ -95,11 +106,16 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): # Add target.domain_id to validate domain-scoped tokens target['domain_id'] = target['project']['domain_id'] - ENFORCER.enforce_call(action='identity:list_role_assignments_for_tree', - filters=filters, target_attr=target) + ENFORCER.enforce_call( + action='identity:list_role_assignments_for_tree', + filters=filters, + target_attr=target, + ) if not project_id: - msg = _('scope.project.id must be specified if include_subtree ' - 'is also specified') + msg = _( + 'scope.project.id must be specified if include_subtree ' + 'is also specified' + ) raise exception.ValidationError(message=msg) return self._build_role_assignments_list(include_subtree=True) @@ -144,31 +160,36 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): include_subtree=include_subtree, inherited=self._inherited, effective=self._effective, - 
include_names=include_names) + include_names=include_names, + ) formatted_refs = [self._format_entity(ref) for ref in refs] return self.wrap_collection(formatted_refs) def _assert_domain_nand_project(self): - if (flask.request.args.get('scope.domain.id') and - flask.request.args.get('scope.project.id')): + if flask.request.args.get( + 'scope.domain.id' + ) and flask.request.args.get('scope.project.id'): msg = _('Specify a domain or project, not both') raise exception.ValidationError(msg) def _assert_system_nand_domain(self): - if (flask.request.args.get('scope.domain.id') and - flask.request.args.get('scope.system')): + if flask.request.args.get( + 'scope.domain.id' + ) and flask.request.args.get('scope.system'): msg = _('Specify system or domain, not both') raise exception.ValidationError(msg) def _assert_system_nand_project(self): - if (flask.request.args.get('scope.project.id') and - flask.request.args.get('scope.system')): + if flask.request.args.get( + 'scope.project.id' + ) and flask.request.args.get('scope.system'): msg = _('Specify system or project, not both') raise exception.ValidationError(msg) def _assert_user_nand_group(self): - if (flask.request.args.get('user.id') and - flask.request.args.get('group.id')): + if flask.request.args.get('user.id') and flask.request.args.get( + 'group.id' + ): msg = _('Specify a user or group, not both') raise exception.ValidationError(msg) @@ -184,14 +205,17 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): """ if self._effective: if flask.request.args.get('group.id'): - msg = _('Combining effective and group filter will always ' - 'result in an empty list.') + msg = _( + 'Combining effective and group filter will always ' + 'result in an empty list.' + ) raise exception.ValidationError(msg) if self._inherited and flask.request.args.get('scope.domain.id'): msg = _( 'Combining effective, domain and inherited filters will ' - 'always result in an empty list.') + 'always result in an empty list.' 
+ ) raise exception.ValidationError(msg) @property @@ -275,33 +299,45 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): if 'project_id' in entity: if 'project_name' in entity: - formatted_entity['scope'] = {'project': { - 'id': entity['project_id'], - 'name': entity['project_name'], - 'domain': {'id': entity['project_domain_id'], - 'name': entity['project_domain_name']}}} + formatted_entity['scope'] = { + 'project': { + 'id': entity['project_id'], + 'name': entity['project_name'], + 'domain': { + 'id': entity['project_domain_id'], + 'name': entity['project_domain_name'], + }, + } + } else: formatted_entity['scope'] = { - 'project': {'id': entity['project_id']}} + 'project': {'id': entity['project_id']} + } if 'domain_id' in entity.get('indirect', {}): inherited_assignment = True - formatted_link = ('/domains/%s' % - entity['indirect']['domain_id']) + formatted_link = ( + '/domains/%s' % entity['indirect']['domain_id'] + ) elif 'project_id' in entity.get('indirect', {}): inherited_assignment = True - formatted_link = ('/projects/%s' % - entity['indirect']['project_id']) + formatted_link = ( + '/projects/%s' % entity['indirect']['project_id'] + ) else: formatted_link = '/projects/%s' % entity['project_id'] elif 'domain_id' in entity: if 'domain_name' in entity: formatted_entity['scope'] = { - 'domain': {'id': entity['domain_id'], - 'name': entity['domain_name']}} + 'domain': { + 'id': entity['domain_id'], + 'name': entity['domain_name'], + } + } else: formatted_entity['scope'] = { - 'domain': {'id': entity['domain_id']}} + 'domain': {'id': entity['domain_id']} + } formatted_link = '/domains/%s' % entity['domain_id'] elif 'system' in entity: formatted_link = '/system' @@ -312,14 +348,18 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): formatted_entity['user'] = { 'id': entity['user_id'], 'name': entity['user_name'], - 'domain': {'id': entity['user_domain_id'], - 'name': entity['user_domain_name']}} + 'domain': { + 'id': entity['user_domain_id'], + 
'name': entity['user_domain_name'], + }, + } else: formatted_entity['user'] = {'id': entity['user_id']} if 'group_id' in entity.get('indirect', {}): - membership_url = ( - ks_flask.base_url(path='/groups/%s/users/%s' % ( - entity['indirect']['group_id'], entity['user_id']))) + membership_url = ks_flask.base_url( + path='/groups/%s/users/%s' + % (entity['indirect']['group_id'], entity['user_id']) + ) formatted_entity['links']['membership'] = membership_url formatted_link += '/groups/%s' % entity['indirect']['group_id'] else: @@ -329,43 +369,54 @@ class RoleAssignmentsResource(ks_flask.ResourceBase): formatted_entity['group'] = { 'id': entity['group_id'], 'name': entity['group_name'], - 'domain': {'id': entity['group_domain_id'], - 'name': entity['group_domain_name']}} + 'domain': { + 'id': entity['group_domain_id'], + 'name': entity['group_domain_name'], + }, + } else: formatted_entity['group'] = {'id': entity['group_id']} formatted_link += '/groups/%s' % entity['group_id'] if 'role_name' in entity: - formatted_entity['role'] = {'id': entity['role_id'], - 'name': entity['role_name']} + formatted_entity['role'] = { + 'id': entity['role_id'], + 'name': entity['role_name'], + } if 'role_domain_id' in entity and 'role_domain_name' in entity: formatted_entity['role'].update( - {'domain': {'id': entity['role_domain_id'], - 'name': entity['role_domain_name']}}) + { + 'domain': { + 'id': entity['role_domain_id'], + 'name': entity['role_domain_name'], + } + } + ) else: formatted_entity['role'] = {'id': entity['role_id']} prior_role_link = '' if 'role_id' in entity.get('indirect', {}): formatted_link += '/roles/%s' % entity['indirect']['role_id'] - prior_role_link = ( - '/prior_role/%(prior)s/implies/%(implied)s' % { - 'prior': entity['role_id'], - 'implied': entity['indirect']['role_id'] - }) + prior_role_link = '/prior_role/%(prior)s/implies/%(implied)s' % { + 'prior': entity['role_id'], + 'implied': entity['indirect']['role_id'], + } else: formatted_link += '/roles/%s' % 
entity['role_id'] if inherited_assignment: - formatted_entity['scope']['OS-INHERIT:inherited_to'] = ( - 'projects') - formatted_link = ('/OS-INHERIT%s/inherited_to_projects' % - formatted_link) + formatted_entity['scope']['OS-INHERIT:inherited_to'] = 'projects' + formatted_link = ( + '/OS-INHERIT%s/inherited_to_projects' % formatted_link + ) formatted_entity['links']['assignment'] = ks_flask.base_url( - path=formatted_link) + path=formatted_link + ) if prior_role_link: - formatted_entity['links']['prior_role'] = ( - ks_flask.base_url(path=prior_role_link)) + formatted_entity['links']['prior_role'] = ks_flask.base_url( + path=prior_role_link + ) return formatted_entity @@ -379,7 +430,8 @@ class RoleAssignmentsAPI(ks_flask.APIBase): resource=RoleAssignmentsResource, url='/role_assignments', resource_kwargs={}, - rel='role_assignments') + rel='role_assignments', + ) ] diff --git a/keystone/api/role_inferences.py b/keystone/api/role_inferences.py index a7f501425b..b57a5f4884 100644 --- a/keystone/api/role_inferences.py +++ b/keystone/api/role_inferences.py @@ -32,8 +32,10 @@ class RoleInferencesResource(flask_restful.Resource): """ ENFORCER.enforce_call(action='identity:list_role_inference_rules') refs = PROVIDERS.role_api.list_role_inference_rules() - role_dict = {role_ref['id']: role_ref - for role_ref in PROVIDERS.role_api.list_roles()} + role_dict = { + role_ref['id']: role_ref + for role_ref in PROVIDERS.role_api.list_roles() + } rules = dict() for ref in refs: @@ -42,15 +44,22 @@ class RoleInferencesResource(flask_restful.Resource): implied = rules.get(prior_role_id, []) implied.append( shared.build_implied_role_response_data( - role_dict[implied_role_id])) + role_dict[implied_role_id] + ) + ) rules[prior_role_id] = implied inferences = [] - for prior_id, implied, in rules.items(): + for ( + prior_id, + implied, + ) in rules.items(): prior_response = shared.build_prior_role_response_data( - prior_id, role_dict[prior_id]['name']) - inferences.append({'prior_role': 
prior_response, - 'implies': implied}) + prior_id, role_dict[prior_id]['name'] + ) + inferences.append( + {'prior_role': prior_response, 'implies': implied} + ) results = {'role_inferences': inferences} return results @@ -64,7 +73,8 @@ class RoleInferencesAPI(ks_flask.APIBase): resource=RoleInferencesResource, url='/role_inferences', resource_kwargs={}, - rel='role_inferences') + rel='role_inferences', + ) ] diff --git a/keystone/api/roles.py b/keystone/api/roles.py index ee3a903e76..3f9be46066 100644 --- a/keystone/api/roles.py +++ b/keystone/api/roles.py @@ -35,7 +35,8 @@ class RoleResource(ks_flask.ResourceBase): collection_key = 'roles' member_key = 'role' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='role_api', method='get_role') + api='role_api', method='get_role' + ) def _is_domain_role(self, role): return bool(role.get('domain_id')) @@ -72,20 +73,24 @@ class RoleResource(ks_flask.ResourceBase): # reraise the error after enforcement if needed. raise err else: - ENFORCER.enforce_call(action='identity:get_domain_role', - member_target_type='role', - member_target=role) + ENFORCER.enforce_call( + action='identity:get_domain_role', + member_target_type='role', + member_target=role, + ) return self.wrap_member(role) def _list_roles(self): filters = ['name', 'domain_id'] domain_filter = flask.request.args.get('domain_id') if domain_filter: - ENFORCER.enforce_call(action='identity:list_domain_roles', - filters=filters) + ENFORCER.enforce_call( + action='identity:list_domain_roles', filters=filters + ) else: - ENFORCER.enforce_call(action='identity:list_roles', - filters=filters) + ENFORCER.enforce_call( + action='identity:list_roles', filters=filters + ) hints = self.build_driver_hints(filters) if not domain_filter: @@ -113,7 +118,8 @@ class RoleResource(ks_flask.ResourceBase): role = self._assign_unique_id(role) role = self._normalize_dict(role) ref = PROVIDERS.role_api.create_role( - role['id'], role, initiator=self.audit_initiator) + 
role['id'], role, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def patch(self, role_id): @@ -136,14 +142,17 @@ class RoleResource(ks_flask.ResourceBase): if err: raise err else: - ENFORCER.enforce_call(action='identity:update_domain_role', - member_target_type='role', - member_target=role) + ENFORCER.enforce_call( + action='identity:update_domain_role', + member_target_type='role', + member_target=role, + ) request_body_role = self.request_body_json.get('role', {}) validation.lazy_validate(schema.role_update, request_body_role) self._require_matching_id(request_body_role) ref = PROVIDERS.role_api.update_role( - role_id, request_body_role, initiator=self.audit_initiator) + role_id, request_body_role, initiator=self.audit_initiator + ) return self.wrap_member(ref) def delete(self, role_id): @@ -166,9 +175,11 @@ class RoleResource(ks_flask.ResourceBase): if err: raise err else: - ENFORCER.enforce_call(action='identity:delete_domain_role', - member_target_type='role', - member_target=role) + ENFORCER.enforce_call( + action='identity:delete_domain_role', + member_target_type='role', + member_target=role, + ) PROVIDERS.role_api.delete_role(role_id, initiator=self.audit_initiator) return None, http.client.NO_CONTENT @@ -177,10 +188,12 @@ def _build_enforcement_target_ref(): ref = {} if flask.request.view_args: ref['prior_role'] = PROVIDERS.role_api.get_role( - flask.request.view_args.get('prior_role_id')) + flask.request.view_args.get('prior_role_id') + ) if flask.request.view_args.get('implied_role_id'): ref['implied_role'] = PROVIDERS.role_api.get_role( - flask.request.view_args['implied_role_id']) + flask.request.view_args['implied_role_id'] + ) return ref @@ -190,8 +203,10 @@ class RoleImplicationListResource(flask_restful.Resource): GET/HEAD /v3/roles/{prior_role_id}/implies """ - ENFORCER.enforce_call(action='identity:list_implied_roles', - build_target=_build_enforcement_target_ref) + ENFORCER.enforce_call( + 
action='identity:list_implied_roles', + build_target=_build_enforcement_target_ref, + ) ref = PROVIDERS.role_api.list_implied_roles(prior_role_id) implied_ids = [r['implied_role_id'] for r in ref] response_json = shared.role_inference_response(prior_role_id) @@ -199,10 +214,11 @@ class RoleImplicationListResource(flask_restful.Resource): for implied_id in implied_ids: implied_role = PROVIDERS.role_api.get_role(implied_id) response_json['role_inference']['implies'].append( - shared.build_implied_role_response_data(implied_role)) + shared.build_implied_role_response_data(implied_role) + ) response_json['links'] = { - 'self': ks_flask.base_url( - path='/roles/%s/implies' % prior_role_id)} + 'self': ks_flask.base_url(path='/roles/%s/implies' % prior_role_id) + } return response_json @@ -215,8 +231,10 @@ class RoleImplicationResource(flask_restful.Resource): # consistent policy enforcement behavior even if it is superfluous. # Alternatively we can keep check_implied_role and reference # ._get_implied_role instead. - ENFORCER.enforce_call(action='identity:check_implied_role', - build_target=_build_enforcement_target_ref) + ENFORCER.enforce_call( + action='identity:check_implied_role', + build_target=_build_enforcement_target_ref, + ) self.get(prior_role_id, implied_role_id) # NOTE(morgan): Our API here breaks HTTP Spec. This should be evaluated # for a future fix. 
This should just return the above "get" however, @@ -231,22 +249,24 @@ class RoleImplicationResource(flask_restful.Resource): """ ENFORCER.enforce_call( action='identity:get_implied_role', - build_target=_build_enforcement_target_ref) + build_target=_build_enforcement_target_ref, + ) return self._get_implied_role(prior_role_id, implied_role_id) def _get_implied_role(self, prior_role_id, implied_role_id): # Isolate this logic so it can be re-used without added enforcement - PROVIDERS.role_api.get_implied_role( - prior_role_id, implied_role_id) + PROVIDERS.role_api.get_implied_role(prior_role_id, implied_role_id) implied_role_ref = PROVIDERS.role_api.get_role(implied_role_id) response_json = shared.role_inference_response(prior_role_id) - response_json['role_inference'][ - 'implies'] = shared.build_implied_role_response_data( - implied_role_ref) + response_json['role_inference']['implies'] = ( + shared.build_implied_role_response_data(implied_role_ref) + ) response_json['links'] = { 'self': ks_flask.base_url( - path='/roles/%(prior)s/implies/%(implies)s' % { - 'prior': prior_role_id, 'implies': implied_role_id})} + path='/roles/%(prior)s/implies/%(implies)s' + % {'prior': prior_role_id, 'implies': implied_role_id} + ) + } return response_json def put(self, prior_role_id, implied_role_id): @@ -254,8 +274,10 @@ class RoleImplicationResource(flask_restful.Resource): PUT /v3/roles/{prior_role_id}/implies/{implied_role_id} """ - ENFORCER.enforce_call(action='identity:create_implied_role', - build_target=_build_enforcement_target_ref) + ENFORCER.enforce_call( + action='identity:create_implied_role', + build_target=_build_enforcement_target_ref, + ) PROVIDERS.role_api.create_implied_role(prior_role_id, implied_role_id) response_json = self._get_implied_role(prior_role_id, implied_role_id) return response_json, http.client.CREATED @@ -265,8 +287,10 @@ class RoleImplicationResource(flask_restful.Resource): DELETE /v3/roles/{prior_role_id}/implies/{implied_role_id} """ - 
ENFORCER.enforce_call(action='identity:delete_implied_role', - build_target=_build_enforcement_target_ref) + ENFORCER.enforce_call( + action='identity:delete_implied_role', + build_target=_build_enforcement_target_ref, + ) PROVIDERS.role_api.delete_implied_role(prior_role_id, implied_role_id) return None, http.client.NO_CONTENT @@ -281,16 +305,21 @@ class RoleAPI(ks_flask.APIBase): url='/roles//implies', resource_kwargs={}, rel='implied_roles', - path_vars={'prior_role_id': json_home.Parameters.ROLE_ID}), + path_vars={'prior_role_id': json_home.Parameters.ROLE_ID}, + ), ks_flask.construct_resource_map( resource=RoleImplicationResource, resource_kwargs={}, - url=('/roles//' - 'implies/'), + url=( + '/roles//' + 'implies/' + ), rel='implied_role', path_vars={ 'prior_role_id': json_home.Parameters.ROLE_ID, - 'implied_role_id': json_home.Parameters.ROLE_ID}) + 'implied_role_id': json_home.Parameters.ROLE_ID, + }, + ), ] diff --git a/keystone/api/s3tokens.py b/keystone/api/s3tokens.py index 4a8439d696..6a2ba0db43 100644 --- a/keystone/api/s3tokens.py +++ b/keystone/api/s3tokens.py @@ -39,8 +39,11 @@ def _calculate_signature_v1(string_to_sign, secret_key): """ key = str(secret_key).encode('utf-8') b64_encode = base64.encodebytes - signed = b64_encode(hmac.new(key, string_to_sign, hashlib.sha1) - .digest()).decode('utf-8').strip() + signed = ( + b64_encode(hmac.new(key, string_to_sign, hashlib.sha1).digest()) + .decode('utf-8') + .strip() + ) return signed @@ -80,15 +83,18 @@ class S3Resource(EC2_S3_Resource.ResourceBase): string_to_sign = base64.urlsafe_b64decode(str(credentials['token'])) if string_to_sign[0:4] != b'AWS4': - signature = _calculate_signature_v1(string_to_sign, - creds_ref['secret']) + signature = _calculate_signature_v1( + string_to_sign, creds_ref['secret'] + ) else: - signature = _calculate_signature_v4(string_to_sign, - creds_ref['secret']) + signature = _calculate_signature_v4( + string_to_sign, creds_ref['secret'] + ) if not 
utils.auth_str_equal(credentials['signature'], signature): raise exception.Unauthorized( - message=_('Credential signature mismatch')) + message=_('Credential signature mismatch') + ) @ks_flask.unenforced_api def post(self): @@ -115,7 +121,9 @@ class S3Api(ks_flask.APIBase): resource_kwargs={}, rel='s3tokens', resource_relation_func=( - json_home_relations.s3_token_resource_rel_func)) + json_home_relations.s3_token_resource_rel_func + ), + ) ] diff --git a/keystone/api/services.py b/keystone/api/services.py index 207deaac99..4d223918b2 100644 --- a/keystone/api/services.py +++ b/keystone/api/services.py @@ -51,7 +51,8 @@ class ServicesResource(ks_flask.ResourceBase): validation.lazy_validate(schema.service_create, service) service = self._assign_unique_id(self._normalize_dict(service)) ref = PROVIDERS.catalog_api.create_service( - service['id'], service, initiator=self.audit_initiator) + service['id'], service, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def patch(self, service_id): @@ -60,13 +61,18 @@ class ServicesResource(ks_flask.ResourceBase): validation.lazy_validate(schema.service_update, service) self._require_matching_id(service) ref = PROVIDERS.catalog_api.update_service( - service_id, service, initiator=self.audit_initiator) + service_id, service, initiator=self.audit_initiator + ) return self.wrap_member(ref) def delete(self, service_id): ENFORCER.enforce_call(action='identity:delete_service') - return PROVIDERS.catalog_api.delete_service( - service_id, initiator=self.audit_initiator), http.client.NO_CONTENT + return ( + PROVIDERS.catalog_api.delete_service( + service_id, initiator=self.audit_initiator + ), + http.client.NO_CONTENT, + ) class ServiceAPI(ks_flask.APIBase): diff --git a/keystone/api/system.py b/keystone/api/system.py index c68151868f..2263a7ac25 100644 --- a/keystone/api/system.py +++ b/keystone/api/system.py @@ -33,18 +33,21 @@ def _build_enforcement_target(allow_non_existing=False): if 
flask.request.view_args: if flask.request.view_args.get('role_id'): target['role'] = PROVIDERS.role_api.get_role( - flask.request.view_args['role_id']) + flask.request.view_args['role_id'] + ) if flask.request.view_args.get('user_id'): try: target['user'] = PROVIDERS.identity_api.get_user( - flask.request.view_args['user_id']) + flask.request.view_args['user_id'] + ) except exception.UserNotFound: if not allow_non_existing: raise else: try: target['group'] = PROVIDERS.identity_api.get_group( - flask.request.view_args.get('group_id')) + flask.request.view_args.get('group_id') + ) except exception.GroupNotFound: if not allow_non_existing: raise @@ -57,11 +60,14 @@ class SystemUsersListResource(flask_restful.Resource): GET/HEAD /system/users/{user_id}/roles """ - ENFORCER.enforce_call(action='identity:list_system_grants_for_user', - build_target=_build_enforcement_target) + ENFORCER.enforce_call( + action='identity:list_system_grants_for_user', + build_target=_build_enforcement_target, + ) refs = PROVIDERS.assignment_api.list_system_grants_for_user(user_id) return ks_flask.ResourceBase.wrap_collection( - refs, collection_name='roles') + refs, collection_name='roles' + ) class SystemUsersResource(flask_restful.Resource): @@ -70,8 +76,10 @@ class SystemUsersResource(flask_restful.Resource): GET/HEAD /system/users/{user_id}/roles/{role_id} """ - ENFORCER.enforce_call(action='identity:check_system_grant_for_user', - build_target=_build_enforcement_target) + ENFORCER.enforce_call( + action='identity:check_system_grant_for_user', + build_target=_build_enforcement_target, + ) PROVIDERS.assignment_api.check_system_grant_for_user(user_id, role_id) return None, http.client.NO_CONTENT @@ -80,8 +88,10 @@ class SystemUsersResource(flask_restful.Resource): PUT /system/users/{user_id}/roles/{role_id} """ - ENFORCER.enforce_call(action='identity:create_system_grant_for_user', - build_target=_build_enforcement_target) + ENFORCER.enforce_call( + 
action='identity:create_system_grant_for_user', + build_target=_build_enforcement_target, + ) PROVIDERS.assignment_api.create_system_grant_for_user(user_id, role_id) return None, http.client.NO_CONTENT @@ -93,8 +103,9 @@ class SystemUsersResource(flask_restful.Resource): ENFORCER.enforce_call( action='identity:revoke_system_grant_for_user', build_target=functools.partial( - _build_enforcement_target, - allow_non_existing=True)) + _build_enforcement_target, allow_non_existing=True + ), + ) PROVIDERS.assignment_api.delete_system_grant_for_user(user_id, role_id) return None, http.client.NO_CONTENT @@ -105,11 +116,14 @@ class SystemGroupsRolesListResource(flask_restful.Resource): GET/HEAD /system/groups/{group_id}/roles """ - ENFORCER.enforce_call(action='identity:list_system_grants_for_group', - build_target=_build_enforcement_target) + ENFORCER.enforce_call( + action='identity:list_system_grants_for_group', + build_target=_build_enforcement_target, + ) refs = PROVIDERS.assignment_api.list_system_grants_for_group(group_id) return ks_flask.ResourceBase.wrap_collection( - refs, collection_name='roles') + refs, collection_name='roles' + ) class SystemGroupsRolestResource(flask_restful.Resource): @@ -118,10 +132,13 @@ class SystemGroupsRolestResource(flask_restful.Resource): GET/HEAD /system/groups/{group_id}/roles/{role_id} """ - ENFORCER.enforce_call(action='identity:check_system_grant_for_group', - build_target=_build_enforcement_target) + ENFORCER.enforce_call( + action='identity:check_system_grant_for_group', + build_target=_build_enforcement_target, + ) PROVIDERS.assignment_api.check_system_grant_for_group( - group_id, role_id) + group_id, role_id + ) return None, http.client.NO_CONTENT def put(self, group_id, role_id): @@ -129,10 +146,13 @@ class SystemGroupsRolestResource(flask_restful.Resource): PUT /system/groups/{group_id}/roles/{role_id} """ - ENFORCER.enforce_call(action='identity:create_system_grant_for_group', - build_target=_build_enforcement_target) + 
ENFORCER.enforce_call( + action='identity:create_system_grant_for_group', + build_target=_build_enforcement_target, + ) PROVIDERS.assignment_api.create_system_grant_for_group( - group_id, role_id) + group_id, role_id + ) return None, http.client.NO_CONTENT def delete(self, group_id, role_id): @@ -143,10 +163,12 @@ class SystemGroupsRolestResource(flask_restful.Resource): ENFORCER.enforce_call( action='identity:revoke_system_grant_for_group', build_target=functools.partial( - _build_enforcement_target, - allow_non_existing=True)) + _build_enforcement_target, allow_non_existing=True + ), + ) PROVIDERS.assignment_api.delete_system_grant_for_group( - group_id, role_id) + group_id, role_id + ) return None, http.client.NO_CONTENT @@ -160,7 +182,8 @@ class SystemAPI(ks_flask.APIBase): url='/system/users//roles', resource_kwargs={}, rel='system_user_roles', - path_vars={'user_id': json_home.Parameters.USER_ID}), + path_vars={'user_id': json_home.Parameters.USER_ID}, + ), ks_flask.construct_resource_map( resource=SystemUsersResource, url='/system/users//roles/', @@ -168,13 +191,16 @@ class SystemAPI(ks_flask.APIBase): rel='system_user_role', path_vars={ 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID}), + 'user_id': json_home.Parameters.USER_ID, + }, + ), ks_flask.construct_resource_map( resource=SystemGroupsRolesListResource, url='/system/groups//roles', resource_kwargs={}, rel='system_group_roles', - path_vars={'group_id': json_home.Parameters.GROUP_ID}), + path_vars={'group_id': json_home.Parameters.GROUP_ID}, + ), ks_flask.construct_resource_map( resource=SystemGroupsRolestResource, url='/system/groups//roles/', @@ -182,7 +208,9 @@ class SystemAPI(ks_flask.APIBase): rel='system_group_role', path_vars={ 'role_id': json_home.Parameters.ROLE_ID, - 'group_id': json_home.Parameters.GROUP_ID}) + 'group_id': json_home.Parameters.GROUP_ID, + }, + ), ] diff --git a/keystone/api/trusts.py b/keystone/api/trusts.py index 781b99efe5..85e611299a 
100644 --- a/keystone/api/trusts.py +++ b/keystone/api/trusts.py @@ -43,7 +43,8 @@ _build_resource_relation = json_home_relations.os_trust_resource_rel_func _build_parameter_relation = json_home_relations.os_trust_parameter_rel_func TRUST_ID_PARAMETER_RELATION = _build_parameter_relation( - parameter_name='trust_id') + parameter_name='trust_id' +) def _build_trust_target_enforcement(): @@ -60,17 +61,21 @@ def _build_trust_target_enforcement(): def _trustor_trustee_only(trust): user_id = flask.request.environ.get(context.REQUEST_CONTEXT_ENV).user_id - if user_id not in [trust.get('trustee_user_id'), - trust.get('trustor_user_id')]: + if user_id not in [ + trust.get('trustee_user_id'), + trust.get('trustor_user_id'), + ]: raise exception.ForbiddenAction( - action=_('Requested user has no relation to this trust')) + action=_('Requested user has no relation to this trust') + ) def _normalize_trust_expires_at(trust): # correct isotime if trust.get('expires_at') is not None: - trust['expires_at'] = utils.isotime(trust['expires_at'], - subsecond=True) + trust['expires_at'] = utils.isotime( + trust['expires_at'], subsecond=True + ) def _normalize_trust_roles(trust): @@ -81,7 +86,8 @@ def _normalize_trust_roles(trust): try: matching_role = PROVIDERS.role_api.get_role(trust_role) full_role = ks_flask.ResourceBase.wrap_member( - matching_role, collection_name='roles', member_name='role') + matching_role, collection_name='roles', member_name='role' + ) trust_full_roles.append(full_role['role']) except exception.RoleNotFound: pass @@ -90,7 +96,8 @@ def _normalize_trust_roles(trust): trust['roles_links'] = { 'self': ks_flask.base_url(path='/%s/roles' % trust['id']), 'next': None, - 'previous': None} + 'previous': None, + } class TrustResource(ks_flask.ResourceBase): @@ -106,8 +113,10 @@ class TrustResource(ks_flask.ResourceBase): token = self.auth_context['token'] if 'application_credential' in token.methods: if not token.application_credential['unrestricted']: - action = 
_("Using method 'application_credential' is not " - "allowed for managing trusts.") + action = _( + "Using method 'application_credential' is not " + "allowed for managing trusts." + ) raise exception.ForbiddenAction(action=action) def _find_redelegated_trust(self): @@ -130,8 +139,9 @@ class TrustResource(ks_flask.ResourceBase): def _require_trustor_has_role_in_project(self, trust): trustor_roles = self._get_trustor_roles(trust) for trust_role in trust['roles']: - matching_roles = [x for x in trustor_roles - if x == trust_role['id']] + matching_roles = [ + x for x in trustor_roles if x == trust_role['id'] + ] if not matching_roles: raise exception.RoleNotFound(role_id=trust_role['id']) @@ -139,7 +149,8 @@ class TrustResource(ks_flask.ResourceBase): original_trust = trust.copy() while original_trust.get('redelegated_trust_id'): original_trust = PROVIDERS.trust_api.get_trust( - original_trust['redelegated_trust_id']) + original_trust['redelegated_trust_id'] + ) if not ((trust.get('project_id')) in [None, '']): # Check project exists. 
@@ -148,7 +159,9 @@ class TrustResource(ks_flask.ResourceBase): assignment_list = PROVIDERS.assignment_api.list_role_assignments( user_id=original_trust['trustor_user_id'], project_id=original_trust['project_id'], - effective=True, strip_domain_roles=False) + effective=True, + strip_domain_roles=False, + ) return list({x['role_id'] for x in assignment_list}) else: return [] @@ -160,12 +173,15 @@ class TrustResource(ks_flask.ResourceBase): roles.append({'id': role['id']}) else: roles.append( - PROVIDERS.role_api.get_unique_role_by_name(role['name'])) + PROVIDERS.role_api.get_unique_role_by_name(role['name']) + ) return roles def _get_trust(self, trust_id): - ENFORCER.enforce_call(action='identity:get_trust', - build_target=_build_trust_target_enforcement) + ENFORCER.enforce_call( + action='identity:get_trust', + build_target=_build_trust_target_enforcement, + ) # NOTE(cmurphy) look up trust before doing is_admin authorization - to # maintain the API contract, we expect a missing trust to raise a 404 @@ -176,7 +192,8 @@ class TrustResource(ks_flask.ResourceBase): # policies are not loaded for the is_admin context, so need to # block access here raise exception.ForbiddenAction( - action=_('Requested user has no relation to this trust')) + action=_('Requested user has no relation to this trust') + ) # NOTE(cmurphy) As of Train, the default policies enforce the # identity:get_trust rule. However, in case the @@ -189,7 +206,8 @@ class TrustResource(ks_flask.ResourceBase): LOG.warning( "The policy check string for rule \"identity:get_trust\" " "has been overridden to \"always true\". In the next release, " - "this will cause the" "\"identity:get_trust\" action to " + "this will cause the" + "\"identity:get_trust\" action to " "be fully permissive as hardcoded enforcement will be " "removed. 
To correct this issue, either stop overriding the " "\"identity:get_trust\" rule in config to accept the " @@ -206,12 +224,14 @@ class TrustResource(ks_flask.ResourceBase): trustee_user_id = flask.request.args.get('trustee_user_id') if trustor_user_id: target = {'trust': {'trustor_user_id': trustor_user_id}} - ENFORCER.enforce_call(action='identity:list_trusts_for_trustor', - target_attr=target) + ENFORCER.enforce_call( + action='identity:list_trusts_for_trustor', target_attr=target + ) elif trustee_user_id: target = {'trust': {'trustee_user_id': trustee_user_id}} - ENFORCER.enforce_call(action='identity:list_trusts_for_trustee', - target_attr=target) + ENFORCER.enforce_call( + action='identity:list_trusts_for_trustee', target_attr=target + ) else: ENFORCER.enforce_call(action='identity:list_trusts') @@ -244,10 +264,12 @@ class TrustResource(ks_flask.ResourceBase): trusts += PROVIDERS.trust_api.list_trusts() elif trustor_user_id: trusts += PROVIDERS.trust_api.list_trusts_for_trustor( - trustor_user_id) + trustor_user_id + ) elif trustee_user_id: trusts += PROVIDERS.trust_api.list_trusts_for_trustee( - trustee_user_id) + trustee_user_id + ) for trust in trusts: # get_trust returns roles, list_trusts does not @@ -257,8 +279,9 @@ class TrustResource(ks_flask.ResourceBase): del trust['roles'] if trust.get('expires_at') is not None: - trust['expires_at'] = utils.isotime(trust['expires_at'], - subsecond=True) + trust['expires_at'] = utils.isotime( + trust['expires_at'], subsecond=True + ) return self.wrap_collection(trusts) @@ -294,7 +317,8 @@ class TrustResource(ks_flask.ResourceBase): trust['roles'] = self._normalize_role_list(trust.get('roles', [])) self._require_trustor_has_role_in_project(trust) trust['expires_at'] = self._parse_expiration_date( - trust.get('expires_at')) + trust.get('expires_at') + ) trust = self._assign_unique_id(trust) redelegated_trust = self._find_redelegated_trust() return_trust = PROVIDERS.trust_api.create_trust( @@ -302,14 +326,17 @@ class 
TrustResource(ks_flask.ResourceBase): trust=trust, roles=trust['roles'], redelegated_trust=redelegated_trust, - initiator=self.audit_initiator) + initiator=self.audit_initiator, + ) _normalize_trust_expires_at(return_trust) _normalize_trust_roles(return_trust) return self.wrap_member(return_trust), http.client.CREATED def delete(self, trust_id): - ENFORCER.enforce_call(action='identity:delete_trust', - build_target=_build_trust_target_enforcement) + ENFORCER.enforce_call( + action='identity:delete_trust', + build_target=_build_trust_target_enforcement, + ) self._check_unrestricted() # NOTE(cmurphy) As of Train, the default policies enforce the @@ -323,19 +350,23 @@ class TrustResource(ks_flask.ResourceBase): LOG.warning( "The policy check string for rule \"identity:delete_trust\" " "has been overridden to \"always true\". In the next release, " - "this will cause the" "\"identity:delete_trust\" action to " + "this will cause the" + "\"identity:delete_trust\" action to " "be fully permissive as hardcoded enforcement will be " "removed. To correct this issue, either stop overriding the " "\"identity:delete_trust\" rule in config to accept the " "defaults, or explicitly set a rule that is not empty." 
) trust = PROVIDERS.trust_api.get_trust(trust_id) - if (self.oslo_context.user_id != trust.get('trustor_user_id') and - not self.oslo_context.is_admin): + if ( + self.oslo_context.user_id != trust.get('trustor_user_id') + and not self.oslo_context.is_admin + ): action = _('Only admin or trustor can delete a trust') raise exception.ForbiddenAction(action=action) - PROVIDERS.trust_api.delete_trust(trust_id, - initiator=self.audit_initiator) + PROVIDERS.trust_api.delete_trust( + trust_id, initiator=self.audit_initiator + ) return '', http.client.NO_CONTENT @@ -349,8 +380,10 @@ class RolesForTrustListResource(flask_restful.Resource): return flask.request.environ.get(context.REQUEST_CONTEXT_ENV, None) def get(self, trust_id): - ENFORCER.enforce_call(action='identity:list_roles_for_trust', - build_target=_build_trust_target_enforcement) + ENFORCER.enforce_call( + action='identity:list_roles_for_trust', + build_target=_build_trust_target_enforcement, + ) # NOTE(morgan): This duplicates a little of the .get_trust from the # main resource, as it needs some of the same logic. However, due to @@ -360,7 +393,8 @@ class RolesForTrustListResource(flask_restful.Resource): # policies are not loaded for the is_admin context, so need to # block access here raise exception.ForbiddenAction( - action=_('Requested user has no relation to this trust')) + action=_('Requested user has no relation to this trust') + ) trust = PROVIDERS.trust_api.get_trust(trust_id) @@ -370,7 +404,8 @@ class RolesForTrustListResource(flask_restful.Resource): # default that would have been produced by the sample config, we need # to enforce it again and warn that the behavior is changing. 
rules = policy._ENFORCER._enforcer.rules.get( - 'identity:list_roles_for_trust') + 'identity:list_roles_for_trust' + ) # rule check_str is "" if isinstance(rules, op_checks.TrueCheck): LOG.warning( @@ -387,8 +422,7 @@ class RolesForTrustListResource(flask_restful.Resource): _normalize_trust_expires_at(trust) _normalize_trust_roles(trust) - return {'roles': trust['roles'], - 'links': trust['roles_links']} + return {'roles': trust['roles'], 'links': trust['roles_links']} # NOTE(morgan): Since this Resource is not being used with the automatic @@ -402,14 +436,17 @@ class RoleForTrustResource(flask_restful.Resource): def get(self, trust_id, role_id): """Get a role that has been assigned to a trust.""" - ENFORCER.enforce_call(action='identity:get_role_for_trust', - build_target=_build_trust_target_enforcement) + ENFORCER.enforce_call( + action='identity:get_role_for_trust', + build_target=_build_trust_target_enforcement, + ) if self.oslo_context.is_admin: # policies are not loaded for the is_admin context, so need to # block access here raise exception.ForbiddenAction( - action=_('Requested user has no relation to this trust')) + action=_('Requested user has no relation to this trust') + ) trust = PROVIDERS.trust_api.get_trust(trust_id) @@ -419,7 +456,8 @@ class RoleForTrustResource(flask_restful.Resource): # default that would have been produced by the sample config, we need # to enforce it again and warn that the behavior is changing. 
rules = policy._ENFORCER._enforcer.rules.get( - 'identity:get_role_for_trust') + 'identity:get_role_for_trust' + ) # rule check_str is "" if isinstance(rules, op_checks.TrueCheck): LOG.warning( @@ -438,8 +476,9 @@ class RoleForTrustResource(flask_restful.Resource): raise exception.RoleNotFound(role_id=role_id) role = PROVIDERS.role_api.get_role(role_id) - return ks_flask.ResourceBase.wrap_member(role, collection_name='roles', - member_name='role') + return ks_flask.ResourceBase.wrap_member( + role, collection_name='roles', member_name='role' + ) class TrustAPI(ks_flask.APIBase): @@ -452,9 +491,9 @@ class TrustAPI(ks_flask.APIBase): url='/trusts//roles', resource_kwargs={}, rel='trust_roles', - path_vars={ - 'trust_id': TRUST_ID_PARAMETER_RELATION}, - resource_relation_func=_build_resource_relation), + path_vars={'trust_id': TRUST_ID_PARAMETER_RELATION}, + resource_relation_func=_build_resource_relation, + ), ks_flask.construct_resource_map( resource=RoleForTrustResource, url='/trusts//roles/', @@ -462,8 +501,10 @@ class TrustAPI(ks_flask.APIBase): rel='trust_role', path_vars={ 'trust_id': TRUST_ID_PARAMETER_RELATION, - 'role_id': json_home.Parameters.ROLE_ID}, - resource_relation_func=_build_resource_relation), + 'role_id': json_home.Parameters.ROLE_ID, + }, + resource_relation_func=_build_resource_relation, + ), ] _api_url_prefix = '/OS-TRUST' diff --git a/keystone/api/users.py b/keystone/api/users.py index 387b2c8724..ff12bd9344 100644 --- a/keystone/api/users.py +++ b/keystone/api/users.py @@ -43,7 +43,8 @@ PROVIDERS = provider_api.ProviderAPIs ACCESS_TOKEN_ID_PARAMETER_RELATION = ( json_home_relations.os_oauth1_parameter_rel_func( - parameter_name='access_token_id') + parameter_name='access_token_id' + ) ) @@ -56,11 +57,13 @@ def _convert_v3_to_ec2_credential(credential): blob = jsonutils.loads(credential['blob']) except TypeError: blob = credential['blob'] - return {'user_id': credential.get('user_id'), - 'tenant_id': credential.get('project_id'), - 'access': 
blob.get('access'), - 'secret': blob.get('secret'), - 'trust_id': blob.get('trust_id')} + return { + 'user_id': credential.get('user_id'), + 'tenant_id': credential.get('project_id'), + 'access': blob.get('access'), + 'secret': blob.get('secret'), + 'trust_id': blob.get('trust_id'), + } def _format_token_entity(entity): @@ -73,12 +76,13 @@ def _format_token_entity(entity): if 'access_secret' in entity: formatted_entity.pop('access_secret') - url = ('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(access_token_id)s' - '/roles' % {'user_id': user_id, - 'access_token_id': access_token_id}) + url = ( + '/users/%(user_id)s/OS-OAUTH1/access_tokens/%(access_token_id)s' + '/roles' % {'user_id': user_id, 'access_token_id': access_token_id} + ) formatted_entity.setdefault('links', {}) - formatted_entity['links']['roles'] = (ks_flask.base_url(url)) + formatted_entity['links']['roles'] = ks_flask.base_url(url) return formatted_entity @@ -86,9 +90,11 @@ def _format_token_entity(entity): def _check_unrestricted_application_credential(token): if 'application_credential' in token.methods: if not token.application_credential['unrestricted']: - action = _("Using method 'application_credential' is not " - "allowed for managing additional application " - "credentials.") + action = _( + "Using method 'application_credential' is not " + "allowed for managing additional application " + "credentials." 
+ ) raise ks_exception.ForbiddenAction(action=action) @@ -117,7 +123,8 @@ def _build_enforcer_target_data_owner_and_user_id_match(): if credential_id is not None: hashed_id = utils.hash_access_key(credential_id) ref['credential'] = PROVIDERS.credential_api.get_credential( - hashed_id) + hashed_id + ) return ref @@ -170,7 +177,8 @@ class UserResource(ks_flask.ResourceBase): collection_key = 'users' member_key = 'user' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='identity_api', method='get_user') + api='identity_api', method='get_user' + ) def get(self, user_id=None): """Get a user resource or list users. @@ -189,7 +197,7 @@ class UserResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:get_user', - build_target=_build_user_target_enforcement + build_target=_build_user_target_enforcement, ) ref = PROVIDERS.identity_api.get_user(user_id) return self.wrap_member(ref) @@ -199,8 +207,15 @@ class UserResource(ks_flask.ResourceBase): GET/HEAD /v3/users """ - filters = ('domain_id', 'enabled', 'idp_id', 'name', 'protocol_id', - 'unique_id', 'password_expires_at') + filters = ( + 'domain_id', + 'enabled', + 'idp_id', + 'name', + 'protocol_id', + 'unique_id', + 'password_expires_at', + ) target = None if self.oslo_context.domain_id: target = {'domain_id': self.oslo_context.domain_id} @@ -212,7 +227,8 @@ class UserResource(ks_flask.ResourceBase): if domain is None and self.oslo_context.domain_id: domain = self.oslo_context.domain_id refs = PROVIDERS.identity_api.list_users( - domain_scope=domain, hints=hints) + domain_scope=domain, hints=hints + ) # If the user making the request used a domain-scoped token, let's make # sure we filter out users that are not in that domain. 
Otherwise, we'd @@ -242,8 +258,8 @@ class UserResource(ks_flask.ResourceBase): user_data = self._normalize_dict(user_data) user_data = self._normalize_domain_id(user_data) ref = PROVIDERS.identity_api.create_user( - user_data, - initiator=self.audit_initiator) + user_data, initiator=self.audit_initiator + ) return self.wrap_member(ref), http.client.CREATED def patch(self, user_id): @@ -253,14 +269,15 @@ class UserResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:update_user', - build_target=_build_user_target_enforcement + build_target=_build_user_target_enforcement, ) PROVIDERS.identity_api.get_user(user_id) user_data = self.request_body_json.get('user', {}) validation.lazy_validate(schema.user_update, user_data) self._require_matching_id(user_data) ref = PROVIDERS.identity_api.update_user( - user_id, user_data, initiator=self.audit_initiator) + user_id, user_data, initiator=self.audit_initiator + ) return self.wrap_member(ref) def delete(self, user_id): @@ -270,10 +287,11 @@ class UserResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:delete_user', - build_target=_build_user_target_enforcement + build_target=_build_user_target_enforcement, ) PROVIDERS.identity_api.delete_user( - user_id, initiator=self.audit_initiator) + user_id, initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT @@ -293,7 +311,8 @@ class UserChangePasswordResource(ks_flask.ResourceBase): user_id=user_id, original_password=user_data['original_password'], new_password=user_data['password'], - initiator=self.audit_initiator) + initiator=self.audit_initiator, + ) except AssertionError as e: raise ks_exception.Unauthorized( _('Error when changing user password: %s') % e @@ -305,13 +324,16 @@ class UserProjectsResource(ks_flask.ResourceBase): collection_key = 'projects' member_key = 'project' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='resource_api', method='get_project') + api='resource_api', 
method='get_project' + ) def get(self, user_id): filters = ('domain_id', 'enabled', 'name') - ENFORCER.enforce_call(action='identity:list_user_projects', - filters=filters, - build_target=_build_user_target_enforcement) + ENFORCER.enforce_call( + action='identity:list_user_projects', + filters=filters, + build_target=_build_user_target_enforcement, + ) hints = self.build_driver_hints(filters) refs = PROVIDERS.assignment_api.list_projects_for_user(user_id) return self.wrap_collection(refs, hints=hints) @@ -321,7 +343,8 @@ class UserGroupsResource(ks_flask.ResourceBase): collection_key = 'groups' member_key = 'group' get_member_from_driver = PROVIDERS.deferred_provider_lookup( - api='identity_api', method='get_group') + api='identity_api', method='get_group' + ) def get(self, user_id): """Get groups for a user. @@ -330,12 +353,15 @@ class UserGroupsResource(ks_flask.ResourceBase): """ filters = ('name',) hints = self.build_driver_hints(filters) - ENFORCER.enforce_call(action='identity:list_groups_for_user', - build_target=_build_user_target_enforcement, - filters=filters) - refs = PROVIDERS.identity_api.list_groups_for_user(user_id=user_id, - hints=hints) - if (self.oslo_context.domain_id): + ENFORCER.enforce_call( + action='identity:list_groups_for_user', + build_target=_build_user_target_enforcement, + filters=filters, + ) + refs = PROVIDERS.identity_api.list_groups_for_user( + user_id=user_id, hints=hints + ) + if self.oslo_context.domain_id: filtered_refs = [] for ref in refs: if ref['domain_id'] == self.oslo_context.domain_id: @@ -358,7 +384,8 @@ class _UserOSEC2CredBaseResource(ks_flask.ResourceBase): url = ks_flask.base_url(path) % { 'user_id': ref['user_id'], - 'credential_id': ref['access']} + 'credential_id': ref['access'], + } ref.setdefault('links', {}) ref['links']['self'] = url @@ -372,10 +399,10 @@ class UserOSEC2CredentialsResourceListCreate(_UserOSEC2CredBaseResource): ENFORCER.enforce_call(action='identity:ec2_list_credentials') 
PROVIDERS.identity_api.get_user(user_id) credential_refs = PROVIDERS.credential_api.list_credentials_for_user( - user_id, type=CRED_TYPE_EC2) + user_id, type=CRED_TYPE_EC2 + ) collection_refs = [ - _convert_v3_to_ec2_credential(cred) - for cred in credential_refs + _convert_v3_to_ec2_credential(cred) for cred in credential_refs ] return self.wrap_collection(collection_refs) @@ -386,15 +413,16 @@ class UserOSEC2CredentialsResourceListCreate(_UserOSEC2CredBaseResource): """ target = {} target['credential'] = {'user_id': user_id} - ENFORCER.enforce_call(action='identity:ec2_create_credential', - target_attr=target) + ENFORCER.enforce_call( + action='identity:ec2_create_credential', target_attr=target + ) PROVIDERS.identity_api.get_user(user_id) tenant_id = self.request_body_json.get('tenant_id') PROVIDERS.resource_api.get_project(tenant_id) blob = dict( access=uuid.uuid4().hex, secret=uuid.uuid4().hex, - trust_id=self.oslo_context.trust_id + trust_id=self.oslo_context.trust_id, ) credential_id = utils.hash_access_key(blob['access']) cred_data = dict( @@ -402,7 +430,7 @@ class UserOSEC2CredentialsResourceListCreate(_UserOSEC2CredBaseResource): project_id=tenant_id, blob=jsonutils.dumps(blob), id=credential_id, - type=CRED_TYPE_EC2 + type=CRED_TYPE_EC2, ) PROVIDERS.credential_api.create_credential(credential_id, cred_data) ref = _convert_v3_to_ec2_credential(cred_data) @@ -415,7 +443,8 @@ class UserOSEC2CredentialsResourceGetDelete(_UserOSEC2CredBaseResource): cred = PROVIDERS.credential_api.get_credential(credential_id) if not cred or cred['type'] != CRED_TYPE_EC2: raise ks_exception.Unauthorized( - message=_('EC2 access key not found.')) + message=_('EC2 access key not found.') + ) return _convert_v3_to_ec2_credential(cred) def get(self, user_id, credential_id): @@ -425,8 +454,8 @@ class UserOSEC2CredentialsResourceGetDelete(_UserOSEC2CredBaseResource): """ func = _build_enforcer_target_data_owner_and_user_id_match ENFORCER.enforce_call( - 
action='identity:ec2_get_credential', - build_target=func) + action='identity:ec2_get_credential', build_target=func + ) PROVIDERS.identity_api.get_user(user_id) ec2_cred_id = utils.hash_access_key(credential_id) cred_data = self._get_cred_data(ec2_cred_id) @@ -438,8 +467,9 @@ class UserOSEC2CredentialsResourceGetDelete(_UserOSEC2CredBaseResource): DELETE /users/{user_id}/credentials/OS-EC2/{credential_id} """ func = _build_enforcer_target_data_owner_and_user_id_match - ENFORCER.enforce_call(action='identity:ec2_delete_credential', - build_target=func) + ENFORCER.enforce_call( + action='identity:ec2_delete_credential', build_target=func + ) PROVIDERS.identity_api.get_user(user_id) ec2_cred_id = utils.hash_access_key(credential_id) self._get_cred_data(ec2_cred_id) @@ -473,10 +503,13 @@ class OAuth1ListAccessTokensResource(_OAuth1ResourceBase): ENFORCER.enforce_call(action='identity:list_access_tokens') if self.oslo_context.is_delegated_auth: raise ks_exception.Forbidden( - _('Cannot list request tokens with a token ' - 'issued via delegation.')) + _( + 'Cannot list request tokens with a token ' + 'issued via delegation.' 
+ ) + ) refs = PROVIDERS.oauth_api.list_access_tokens(user_id) - formatted_refs = ([_format_token_entity(x) for x in refs]) + formatted_refs = [_format_token_entity(x) for x in refs] return self.wrap_collection(formatted_refs) @@ -500,7 +533,8 @@ class OAuth1AccessTokenCRUDResource(_OAuth1ResourceBase): """ ENFORCER.enforce_call( action='identity:ec2_delete_credential', - build_target=_build_enforcer_target_data_owner_and_user_id_match) + build_target=_build_enforcer_target_data_owner_and_user_id_match, + ) access_token = PROVIDERS.oauth_api.get_access_token(access_token_id) reason = ( 'Invalidating the token cache because an access token for ' @@ -511,7 +545,8 @@ class OAuth1AccessTokenCRUDResource(_OAuth1ResourceBase): ) notifications.invalidate_token_cache_notification(reason) PROVIDERS.oauth_api.delete_access_token( - user_id, access_token_id, initiator=self.audit_initiator) + user_id, access_token_id, initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT @@ -531,7 +566,7 @@ class OAuth1AccessTokenRoleListResource(ks_flask.ResourceBase): raise ks_exception.NotFound() authed_role_ids = access_token['role_ids'] authed_role_ids = jsonutils.loads(authed_role_ids) - refs = ([_format_role_entity(x) for x in authed_role_ids]) + refs = [_format_role_entity(x) for x in authed_role_ids] return self.wrap_collection(refs) @@ -561,19 +596,21 @@ class OAuth1AccessTokenRoleResource(ks_flask.ResourceBase): class UserAppCredListCreateResource(ks_flask.ResourceBase): collection_key = 'application_credentials' member_key = 'application_credential' - _public_parameters = frozenset([ - 'id', - 'name', - 'description', - 'expires_at', - 'project_id', - 'roles', - # secret is only exposed after create, it is not stored - 'secret', - 'links', - 'unrestricted', - 'access_rules' - ]) + _public_parameters = frozenset( + [ + 'id', + 'name', + 'description', + 'expires_at', + 'project_id', + 'roles', + # secret is only exposed after create, it is not stored + 'secret', + 
'links', + 'unrestricted', + 'access_rules', + ] + ) @staticmethod def _generate_secret(): @@ -591,8 +628,9 @@ class UserAppCredListCreateResource(ks_flask.ResourceBase): if role.get('id'): roles.append(role) else: - roles.append(PROVIDERS.role_api.get_unique_role_by_name( - role['name'])) + roles.append( + PROVIDERS.role_api.get_unique_role_by_name(role['name']) + ) return roles def _get_roles(self, app_cred_data, token): @@ -619,10 +657,13 @@ class UserAppCredListCreateResource(ks_flask.ResourceBase): token_roles = [r['id'] for r in token.roles] for role in roles: if role['id'] not in token_roles: - detail = _('Cannot create an application credential with ' - 'unassigned role') + detail = _( + 'Cannot create an application credential with ' + 'unassigned role' + ) raise ks_exception.ApplicationCredentialValidationError( - detail=detail) + detail=detail + ) else: roles = token.roles return roles @@ -633,8 +674,9 @@ class UserAppCredListCreateResource(ks_flask.ResourceBase): GET/HEAD /v3/users/{user_id}/application_credentials """ filters = ('name',) - ENFORCER.enforce_call(action='identity:list_application_credentials', - filters=filters) + ENFORCER.enforce_call( + action='identity:list_application_credentials', filters=filters + ) app_cred_api = PROVIDERS.application_credential_api hints = self.build_driver_hints(filters) refs = app_cred_api.list_application_credentials(user_id, hints=hints) @@ -647,14 +689,17 @@ class UserAppCredListCreateResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call(action='identity:create_application_credential') app_cred_data = self.request_body_json.get( - 'application_credential', {}) - validation.lazy_validate(app_cred_schema.application_credential_create, - app_cred_data) + 'application_credential', {} + ) + validation.lazy_validate( + app_cred_schema.application_credential_create, app_cred_data + ) token = self.auth_context['token'] _check_unrestricted_application_credential(token) if self.oslo_context.user_id != user_id: - 
action = _('Cannot create an application credential for another ' - 'user.') + action = _( + 'Cannot create an application credential for another ' 'user.' + ) raise ks_exception.ForbiddenAction(action=action) project_id = self.oslo_context.project_id app_cred_data = self._assign_unique_id(app_cred_data) @@ -665,7 +710,8 @@ class UserAppCredListCreateResource(ks_flask.ResourceBase): app_cred_data['roles'] = self._get_roles(app_cred_data, token) if app_cred_data.get('expires_at'): app_cred_data['expires_at'] = utils.parse_expiration_date( - app_cred_data['expires_at']) + app_cred_data['expires_at'] + ) if app_cred_data.get('access_rules'): for access_rule in app_cred_data['access_rules']: # If user provides an access rule by ID, it will be looked up @@ -681,13 +727,15 @@ class UserAppCredListCreateResource(ks_flask.ResourceBase): try: ref = app_cred_api.create_application_credential( - app_cred_data, initiator=self.audit_initiator) + app_cred_data, initiator=self.audit_initiator + ) except ks_exception.RoleAssignmentNotFound as e: # Raise a Bad Request, not a Not Found, in accordance with the # API-SIG recommendations: # https://specs.openstack.org/openstack/api-wg/guidelines/http.html#failure-code-clarifications raise ks_exception.ApplicationCredentialValidationError( - detail=str(e)) + detail=str(e) + ) return self.wrap_member(ref), http.client.CREATED @@ -707,7 +755,8 @@ class UserAppCredGetDeleteResource(ks_flask.ResourceBase): target_attr=target, ) ref = PROVIDERS.application_credential_api.get_application_credential( - application_credential_id) + application_credential_id + ) return self.wrap_member(ref) def delete(self, user_id, application_credential_id): @@ -718,13 +767,13 @@ class UserAppCredGetDeleteResource(ks_flask.ResourceBase): """ target = _update_request_user_id_attribute() ENFORCER.enforce_call( - action='identity:delete_application_credential', - target_attr=target + action='identity:delete_application_credential', target_attr=target ) token = 
self.auth_context['token'] _check_unrestricted_application_credential(token) PROVIDERS.application_credential_api.delete_application_credential( - application_credential_id, initiator=self.audit_initiator) + application_credential_id, initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT @@ -737,10 +786,16 @@ class UserAccessRuleListResource(ks_flask.ResourceBase): GET/HEAD /v3/users/{user_id}/access_rules """ - filters = ('service', 'path', 'method',) - ENFORCER.enforce_call(action='identity:list_access_rules', - filters=filters, - build_target=_build_user_target_enforcement) + filters = ( + 'service', + 'path', + 'method', + ) + ENFORCER.enforce_call( + action='identity:list_access_rules', + filters=filters, + build_target=_build_user_target_enforcement, + ) app_cred_api = PROVIDERS.application_credential_api hints = self.build_driver_hints(filters) refs = app_cred_api.list_access_rules_for_user(user_id, hints=hints) @@ -759,10 +814,11 @@ class UserAccessRuleGetDeleteResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:get_access_rule', - build_target=_build_user_target_enforcement + build_target=_build_user_target_enforcement, ) ref = PROVIDERS.application_credential_api.get_access_rule( - access_rule_id) + access_rule_id + ) return self.wrap_member(ref) def delete(self, user_id, access_rule_id): @@ -772,10 +828,11 @@ class UserAccessRuleGetDeleteResource(ks_flask.ResourceBase): """ ENFORCER.enforce_call( action='identity:delete_access_rule', - build_target=_build_user_target_enforcement + build_target=_build_user_target_enforcement, ) PROVIDERS.application_credential_api.delete_access_rule( - access_rule_id, initiator=self.audit_initiator) + access_rule_id, initiator=self.audit_initiator + ) return None, http.client.NO_CONTENT @@ -789,21 +846,21 @@ class UserAPI(ks_flask.APIBase): url='/users//password', resource_kwargs={}, rel='user_change_password', - path_vars={'user_id': json_home.Parameters.USER_ID} + 
path_vars={'user_id': json_home.Parameters.USER_ID}, ), ks_flask.construct_resource_map( resource=UserGroupsResource, url='/users//groups', resource_kwargs={}, rel='user_groups', - path_vars={'user_id': json_home.Parameters.USER_ID} + path_vars={'user_id': json_home.Parameters.USER_ID}, ), ks_flask.construct_resource_map( resource=UserProjectsResource, url='/users//projects', resource_kwargs={}, rel='user_projects', - path_vars={'user_id': json_home.Parameters.USER_ID} + path_vars={'user_id': json_home.Parameters.USER_ID}, ), ks_flask.construct_resource_map( resource=UserOSEC2CredentialsResourceListCreate, @@ -811,21 +868,27 @@ class UserAPI(ks_flask.APIBase): resource_kwargs={}, rel='user_credentials', resource_relation_func=( - json_home_relations.os_ec2_resource_rel_func), - path_vars={'user_id': json_home.Parameters.USER_ID} + json_home_relations.os_ec2_resource_rel_func + ), + path_vars={'user_id': json_home.Parameters.USER_ID}, ), ks_flask.construct_resource_map( resource=UserOSEC2CredentialsResourceGetDelete, - url=('/users//credentials/OS-EC2/' - ''), + url=( + '/users//credentials/OS-EC2/' + '' + ), resource_kwargs={}, rel='user_credential', resource_relation_func=( - json_home_relations.os_ec2_resource_rel_func), + json_home_relations.os_ec2_resource_rel_func + ), path_vars={ 'credential_id': json_home.build_v3_parameter_relation( - 'credential_id'), - 'user_id': json_home.Parameters.USER_ID} + 'credential_id' + ), + 'user_id': json_home.Parameters.USER_ID, + }, ), ks_flask.construct_resource_map( resource=OAuth1ListAccessTokensResource, @@ -833,80 +896,99 @@ class UserAPI(ks_flask.APIBase): resource_kwargs={}, rel='user_access_tokens', resource_relation_func=( - json_home_relations.os_oauth1_resource_rel_func), - path_vars={'user_id': json_home.Parameters.USER_ID} + json_home_relations.os_oauth1_resource_rel_func + ), + path_vars={'user_id': json_home.Parameters.USER_ID}, ), ks_flask.construct_resource_map( resource=OAuth1AccessTokenCRUDResource, - 
url=('/users//OS-OAUTH1/' - 'access_tokens/'), + url=( + '/users//OS-OAUTH1/' + 'access_tokens/' + ), resource_kwargs={}, rel='user_access_token', resource_relation_func=( - json_home_relations.os_oauth1_resource_rel_func), + json_home_relations.os_oauth1_resource_rel_func + ), path_vars={ 'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION, - 'user_id': json_home.Parameters.USER_ID} + 'user_id': json_home.Parameters.USER_ID, + }, ), ks_flask.construct_resource_map( resource=OAuth1AccessTokenRoleListResource, - url=('/users//OS-OAUTH1/access_tokens/' - '/roles'), + url=( + '/users//OS-OAUTH1/access_tokens/' + '/roles' + ), resource_kwargs={}, rel='user_access_token_roles', resource_relation_func=( - json_home_relations.os_oauth1_resource_rel_func), - path_vars={'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION, - 'user_id': json_home.Parameters.USER_ID} + json_home_relations.os_oauth1_resource_rel_func + ), + path_vars={ + 'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION, + 'user_id': json_home.Parameters.USER_ID, + }, ), ks_flask.construct_resource_map( resource=OAuth1AccessTokenRoleResource, - url=('/users//OS-OAUTH1/access_tokens/' - '/roles/'), + url=( + '/users//OS-OAUTH1/access_tokens/' + '/roles/' + ), resource_kwargs={}, rel='user_access_token_role', resource_relation_func=( - json_home_relations.os_oauth1_resource_rel_func), - path_vars={'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION, - 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID} + json_home_relations.os_oauth1_resource_rel_func + ), + path_vars={ + 'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION, + 'role_id': json_home.Parameters.ROLE_ID, + 'user_id': json_home.Parameters.USER_ID, + }, ), ks_flask.construct_resource_map( resource=UserAppCredListCreateResource, url='/users//application_credentials', resource_kwargs={}, rel='application_credentials', - path_vars={'user_id': json_home.Parameters.USER_ID} + path_vars={'user_id': 
json_home.Parameters.USER_ID}, ), ks_flask.construct_resource_map( resource=UserAppCredGetDeleteResource, - url=('/users//application_credentials/' - ''), + url=( + '/users//application_credentials/' + '' + ), resource_kwargs={}, rel='application_credential', path_vars={ 'user_id': json_home.Parameters.USER_ID, - 'application_credential_id': - json_home.Parameters.APPLICATION_CRED_ID} + 'application_credential_id': json_home.Parameters.APPLICATION_CRED_ID, + }, ), ks_flask.construct_resource_map( resource=UserAccessRuleListResource, url='/users//access_rules', resource_kwargs={}, rel='access_rules', - path_vars={'user_id': json_home.Parameters.USER_ID} + path_vars={'user_id': json_home.Parameters.USER_ID}, ), ks_flask.construct_resource_map( resource=UserAccessRuleGetDeleteResource, - url=('/users//access_rules/' - ''), + url=( + '/users//access_rules/' + '' + ), resource_kwargs={}, rel='access_rule', path_vars={ 'user_id': json_home.Parameters.USER_ID, - 'access_rule_id': - json_home.Parameters.ACCESS_RULE_ID} - ) + 'access_rule_id': json_home.Parameters.ACCESS_RULE_ID, + }, + ), ] diff --git a/keystone/application_credential/backends/base.py b/keystone/application_credential/backends/base.py index 941682a65f..0edcc8277d 100644 --- a/keystone/application_credential/backends/base.py +++ b/keystone/application_credential/backends/base.py @@ -81,8 +81,9 @@ class ApplicationCredentialDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def delete_application_credentials_for_user_on_project(self, user_id, - project_id): + def delete_application_credentials_for_user_on_project( + self, user_id, project_id + ): """Delete all application credentials for a user on a given project. 
:param str user_id: ID of a user to whose application credentials diff --git a/keystone/application_credential/backends/sql.py b/keystone/application_credential/backends/sql.py index b0fe63890a..8a7e2d9e83 100644 --- a/keystone/application_credential/backends/sql.py +++ b/keystone/application_credential/backends/sql.py @@ -25,9 +25,18 @@ from keystone.i18n import _ class ApplicationCredentialModel(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'application_credential' - attributes = ['internal_id', 'id', 'name', 'secret_hash', 'description', - 'user_id', 'project_id', 'system', 'expires_at', - 'unrestricted'] + attributes = [ + 'internal_id', + 'id', + 'name', + 'secret_hash', + 'description', + 'user_id', + 'project_id', + 'system', + 'expires_at', + 'unrestricted', + ] internal_id = sql.Column(sql.Integer, primary_key=True, nullable=False) id = sql.Column(sql.String(64), nullable=False) name = sql.Column(sql.String(255), nullable=False) @@ -38,19 +47,24 @@ class ApplicationCredentialModel(sql.ModelBase, sql.ModelDictMixin): system = sql.Column(sql.String(64), nullable=True) expires_at = sql.Column(sql.DateTimeInt()) unrestricted = sql.Column(sql.Boolean) - __table_args__ = (sql.UniqueConstraint('name', 'user_id', - name='duplicate_app_cred_constraint'),) + __table_args__ = ( + sql.UniqueConstraint( + 'name', 'user_id', name='duplicate_app_cred_constraint' + ), + ) roles = sqlalchemy.orm.relationship( 'ApplicationCredentialRoleModel', backref=sqlalchemy.orm.backref('application_credential'), cascade='all, delete-orphan', - cascade_backrefs=False) + cascade_backrefs=False, + ) access_rules = sqlalchemy.orm.relationship( 'ApplicationCredentialAccessRuleModel', backref=sqlalchemy.orm.backref('application_credential'), cascade='all, delete-orphan', - cascade_backrefs=False) + cascade_backrefs=False, + ) class ApplicationCredentialRoleModel(sql.ModelBase, sql.ModelDictMixin): @@ -58,10 +72,12 @@ class ApplicationCredentialRoleModel(sql.ModelBase, 
sql.ModelDictMixin): attributes = ['application_credential_id', 'role_id'] application_credential_id = sql.Column( sql.Integer, - sql.ForeignKey('application_credential.internal_id', - ondelete='cascade'), + sql.ForeignKey( + 'application_credential.internal_id', ondelete='cascade' + ), primary_key=True, - nullable=False) + nullable=False, + ) role_id = sql.Column(sql.String(64), primary_key=True, nullable=False) @@ -75,13 +91,19 @@ class AccessRuleModel(sql.ModelBase, sql.ModelDictMixin): path = sql.Column(sql.String(128)) method = sql.Column(sql.String(16)) __table_args__ = ( - sql.UniqueConstraint('user_id', 'service', 'path', 'method', - name='duplicate_access_rule_for_user_constraint'), + sql.UniqueConstraint( + 'user_id', + 'service', + 'path', + 'method', + name='duplicate_access_rule_for_user_constraint', + ), ) application_credential = sqlalchemy.orm.relationship( 'ApplicationCredentialAccessRuleModel', backref=sqlalchemy.orm.backref('access_rule'), - cascade_backrefs=False) + cascade_backrefs=False, + ) class ApplicationCredentialAccessRuleModel(sql.ModelBase, sql.ModelDictMixin): @@ -89,15 +111,18 @@ class ApplicationCredentialAccessRuleModel(sql.ModelBase, sql.ModelDictMixin): attributes = ['application_credential_id', 'access_rule_id'] application_credential_id = sql.Column( sql.Integer, - sql.ForeignKey('application_credential.internal_id', - ondelete='cascade'), + sql.ForeignKey( + 'application_credential.internal_id', ondelete='cascade' + ), primary_key=True, - nullable=False) + nullable=False, + ) access_rule_id = sql.Column( sql.Integer, sql.ForeignKey('access_rule.id'), primary_key=True, - nullable=False) + nullable=False, + ) class ApplicationCredential(base.ApplicationCredentialDriverBase): @@ -115,7 +140,8 @@ class ApplicationCredential(base.ApplicationCredentialDriverBase): msg = _('Invalid application credential ID or secret') try: app_cred_ref = self.get_application_credential( - application_credential_id) + application_credential_id + ) 
except exception.ApplicationCredentialNotFound: raise AssertionError(msg) if not self._check_secret(secret, app_cred_ref): @@ -129,8 +155,9 @@ class ApplicationCredential(base.ApplicationCredentialDriverBase): app_cred_ref['secret_hash'] = hashed_secret @sql.handle_conflicts(conflict_type='application_credential') - def create_application_credential(self, application_credential, roles, - access_rules=None): + def create_application_credential( + self, application_credential, roles, access_rules=None + ): app_cred = application_credential.copy() self._hash_secret(app_cred) with sql.session_for_write() as session: @@ -143,23 +170,31 @@ class ApplicationCredential(base.ApplicationCredentialDriverBase): session.add(app_cred_role) if access_rules: for access_rule in access_rules: - access_rule_ref = session.query(AccessRuleModel).filter_by( - external_id=access_rule['id']).first() + access_rule_ref = ( + session.query(AccessRuleModel) + .filter_by(external_id=access_rule['id']) + .first() + ) if not access_rule_ref: query = session.query(AccessRuleModel) access_rule_ref = query.filter_by( user_id=app_cred['user_id'], service=access_rule['service'], path=access_rule['path'], - method=access_rule['method']).first() + method=access_rule['method'], + ).first() if not access_rule_ref: - access_rule_ref = AccessRuleModel.from_dict({ - k.replace('id', 'external_id'): v - for k, v in access_rule.items()}) + access_rule_ref = AccessRuleModel.from_dict( + { + k.replace('id', 'external_id'): v + for k, v in access_rule.items() + } + ) access_rule_ref['user_id'] = app_cred['user_id'] session.add(access_rule_ref) app_cred_access_rule = ( - ApplicationCredentialAccessRuleModel()) + ApplicationCredentialAccessRuleModel() + ) app_cred_access_rule.application_credential = ref app_cred_access_rule.access_rule = access_rule_ref session.add(app_cred_access_rule) @@ -190,19 +225,22 @@ class ApplicationCredential(base.ApplicationCredentialDriverBase): def get_application_credential(self, 
application_credential_id): with sql.session_for_read() as session: query = session.query(ApplicationCredentialModel).filter_by( - id=application_credential_id) + id=application_credential_id + ) ref = query.first() if ref is None: raise exception.ApplicationCredentialNotFound( - application_credential_id=application_credential_id) + application_credential_id=application_credential_id + ) app_cred_dict = self._to_dict(ref) return app_cred_dict def list_application_credentials_for_user(self, user_id, hints): with sql.session_for_read() as session: query = session.query(ApplicationCredentialModel) - query = sql.filter_limit_query(ApplicationCredentialModel, query, - hints) + query = sql.filter_limit_query( + ApplicationCredentialModel, query, hints + ) app_creds = query.filter_by(user_id=user_id) return [self._to_dict(ref) for ref in app_creds] @@ -211,10 +249,12 @@ class ApplicationCredential(base.ApplicationCredentialDriverBase): with sql.session_for_write() as session: query = session.query(ApplicationCredentialModel) app_cred_ref = query.filter_by( - id=application_credential_id).first() + id=application_credential_id + ).first() if not app_cred_ref: raise exception.ApplicationCredentialNotFound( - application_credential_id=application_credential_id) + application_credential_id=application_credential_id + ) session.delete(app_cred_ref) def delete_application_credentials_for_user(self, user_id): @@ -223,8 +263,9 @@ class ApplicationCredential(base.ApplicationCredentialDriverBase): query = query.filter_by(user_id=user_id) query.delete() - def delete_application_credentials_for_user_on_project(self, user_id, - project_id): + def delete_application_credentials_for_user_on_project( + self, user_id, project_id + ): with sql.session_for_write() as session: query = session.query(ApplicationCredentialModel) query = query.filter_by(user_id=user_id) @@ -234,11 +275,13 @@ class ApplicationCredential(base.ApplicationCredentialDriverBase): def get_access_rule(self, 
access_rule_id): with sql.session_for_read() as session: query = session.query(AccessRuleModel).filter_by( - external_id=access_rule_id) + external_id=access_rule_id + ) ref = query.first() if not ref: raise exception.AccessRuleNotFound( - access_rule_id=access_rule_id) + access_rule_id=access_rule_id + ) access_rule = self._access_rule_to_dict(ref) return access_rule @@ -255,11 +298,13 @@ class ApplicationCredential(base.ApplicationCredentialDriverBase): ref = query.filter_by(external_id=access_rule_id).first() if not ref: raise exception.AccessRuleNotFound( - access_rule_id=access_rule_id) + access_rule_id=access_rule_id + ) session.delete(ref) except AssertionError: raise exception.ForbiddenNotSecurity( - "May not delete access rule in use") + "May not delete access rule in use" + ) def delete_access_rules_for_user(self, user_id): with sql.session_for_write() as session: diff --git a/keystone/application_credential/core.py b/keystone/application_credential/core.py index 8230b65443..2ae27302a6 100644 --- a/keystone/application_credential/core.py +++ b/keystone/application_credential/core.py @@ -48,43 +48,50 @@ class Manager(manager.Manager): def _register_callback_listeners(self): notifications.register_event_callback( - notifications.ACTIONS.deleted, 'user', - self._delete_app_creds_on_user_delete_callback) + notifications.ACTIONS.deleted, + 'user', + self._delete_app_creds_on_user_delete_callback, + ) notifications.register_event_callback( - notifications.ACTIONS.disabled, 'user', - self._delete_app_creds_on_user_delete_callback) + notifications.ACTIONS.disabled, + 'user', + self._delete_app_creds_on_user_delete_callback, + ) notifications.register_event_callback( notifications.ACTIONS.internal, notifications.REMOVE_APP_CREDS_FOR_USER, - self._delete_app_creds_on_assignment_removal) + self._delete_app_creds_on_assignment_removal, + ) def _delete_app_creds_on_user_delete_callback( - self, service, resource_type, operation, payload): + self, service, 
resource_type, operation, payload + ): user_id = payload['resource_info'] self._delete_application_credentials_for_user(user_id) self._delete_access_rules_for_user(user_id) def _delete_app_creds_on_assignment_removal( - self, service, resource_type, operation, payload): + self, service, resource_type, operation, payload + ): user_id = payload['resource_info']['user_id'] project_id = payload['resource_info']['project_id'] - self._delete_application_credentials_for_user_on_project(user_id, - project_id) + self._delete_application_credentials_for_user_on_project( + user_id, project_id + ) def _get_user_roles(self, user_id, project_id): assignment_list = self.assignment_api.list_role_assignments( - user_id=user_id, - project_id=project_id, - effective=True) + user_id=user_id, project_id=project_id, effective=True + ) return list(set([x['role_id'] for x in assignment_list])) def _require_user_has_role_in_project(self, roles, user_id, project_id): user_roles = self._get_user_roles(user_id, project_id) for role in roles: if role['id'] not in user_roles: - raise exception.RoleAssignmentNotFound(role_id=role['id'], - actor_id=user_id, - target_id=project_id) + raise exception.RoleAssignmentNotFound( + role_id=role['id'], actor_id=user_id, target_id=project_id + ) def _assert_limit_not_exceeded(self, user_id): user_limit = CONF.application_credential.user_limit @@ -92,7 +99,8 @@ class Manager(manager.Manager): app_cred_count = len(self.list_application_credentials(user_id)) if app_cred_count >= user_limit: raise exception.ApplicationCredentialLimitExceeded( - limit=user_limit) + limit=user_limit + ) def _get_role_list(self, app_cred_roles): roles = [] @@ -112,12 +120,12 @@ class Manager(manager.Manager): def _process_app_cred(self, app_cred_ref): app_cred_ref = app_cred_ref.copy() app_cred_ref.pop('secret_hash') - app_cred_ref['roles'] = self._get_role_list( - app_cred_ref['roles']) + app_cred_ref['roles'] = self._get_role_list(app_cred_ref['roles']) return app_cred_ref - 
def create_application_credential(self, application_credential, - initiator=None): + def create_application_credential( + self, application_credential, initiator=None + ): """Create a new application credential. :param dict application_credential: Application Credential data @@ -135,13 +143,13 @@ class Manager(manager.Manager): self._require_user_has_role_in_project(roles, user_id, project_id) unhashed_secret = application_credential['secret'] ref = self.driver.create_application_credential( - application_credential, roles, access_rules) + application_credential, roles, access_rules + ) ref['secret'] = unhashed_secret ref = self._process_app_cred(ref) notifications.Audit.created( - self._APP_CRED, - application_credential['id'], - initiator) + self._APP_CRED, application_credential['id'], initiator + ) return ref @MEMOIZE @@ -153,7 +161,8 @@ class Manager(manager.Manager): :returns: an application credential """ app_cred = self.driver.get_application_credential( - application_credential_id) + application_credential_id + ) return self._process_app_cred(app_cred) def list_application_credentials(self, user_id, hints=None): @@ -166,7 +175,8 @@ class Manager(manager.Manager): """ hints = hints or driver_hints.Hints() app_cred_list = self.driver.list_application_credentials_for_user( - user_id, hints) + user_id, hints + ) return [self._process_app_cred(app_cred) for app_cred in app_cred_list] @MEMOIZE @@ -189,8 +199,9 @@ class Manager(manager.Manager): hints = hints or driver_hints.Hints() return self.driver.list_access_rules_for_user(user_id, hints) - def delete_application_credential(self, application_credential_id, - initiator=None): + def delete_application_credential( + self, application_credential_id, initiator=None + ): """Delete an application credential. :param str application_credential_id: Application Credential ID @@ -200,13 +211,16 @@ class Manager(manager.Manager): application credential doesn't exist. 
""" self.driver.delete_application_credential(application_credential_id) - self.get_application_credential.invalidate(self, - application_credential_id) + self.get_application_credential.invalidate( + self, application_credential_id + ) notifications.Audit.deleted( - self._APP_CRED, application_credential_id, initiator) + self._APP_CRED, application_credential_id, initiator + ) - def _delete_application_credentials_for_user(self, user_id, - initiator=None): + def _delete_application_credentials_for_user( + self, user_id, initiator=None + ): """Delete all application credentials for a user. :param str user_id: User ID @@ -214,15 +228,18 @@ class Manager(manager.Manager): This is triggered when a user is deleted. """ app_creds = self.driver.list_application_credentials_for_user( - user_id, driver_hints.Hints()) + user_id, driver_hints.Hints() + ) self.driver.delete_application_credentials_for_user(user_id) for app_cred in app_creds: self.get_application_credential.invalidate(self, app_cred['id']) - notifications.Audit.deleted(self._APP_CRED, app_cred['id'], - initiator) + notifications.Audit.deleted( + self._APP_CRED, app_cred['id'], initiator + ) - def _delete_application_credentials_for_user_on_project(self, user_id, - project_id): + def _delete_application_credentials_for_user_on_project( + self, user_id, project_id + ): """Delete all application credentials for a user on a given project. 
:param str user_id: User ID @@ -233,10 +250,12 @@ class Manager(manager.Manager): hints = driver_hints.Hints() hints.add_filter('project_id', project_id) app_creds = self.driver.list_application_credentials_for_user( - user_id, hints) + user_id, hints + ) self.driver.delete_application_credentials_for_user_on_project( - user_id, project_id) + user_id, project_id + ) for app_cred in app_creds: self.get_application_credential.invalidate(self, app_cred['id']) @@ -252,7 +271,8 @@ class Manager(manager.Manager): self.driver.delete_access_rule(access_rule_id) self.get_access_rule.invalidate(self, access_rule_id) notifications.Audit.deleted( - self._ACCESS_RULE, access_rule_id, initiator) + self._ACCESS_RULE, access_rule_id, initiator + ) def _delete_access_rules_for_user(self, user_id, initiator=None): """Delete all access rules for a user. @@ -262,9 +282,11 @@ class Manager(manager.Manager): This is triggered when a user is deleted. """ access_rules = self.driver.list_access_rules_for_user( - user_id, driver_hints.Hints()) + user_id, driver_hints.Hints() + ) self.driver.delete_access_rules_for_user(user_id) for rule in access_rules: self.get_access_rule.invalidate(self, rule['id']) - notifications.Audit.deleted(self._ACCESS_RULE, rule['id'], - initiator) + notifications.Audit.deleted( + self._ACCESS_RULE, rule['id'], initiator + ) diff --git a/keystone/application_credential/schema.py b/keystone/application_credential/schema.py index f6138db22e..d2f04be954 100644 --- a/keystone/application_credential/schema.py +++ b/keystone/application_credential/schema.py @@ -21,12 +21,12 @@ _role_properties = { 'type': 'object', 'properties': { 'id': parameter_types.id_string, - 'name': parameter_types.name + 'name': parameter_types.name, }, 'minProperties': 1, 'maxProperties': 1, - 'additionalProperties': False - } + 'additionalProperties': False, + }, } _access_rules_properties = { @@ -38,36 +38,32 @@ _access_rules_properties = { 'type': 'string', 'minLength': 0, 'maxLength': 225, 
- 'pattern': r'^\/.*' + 'pattern': r'^\/.*', }, 'method': { 'type': 'string', - 'pattern': r'^(POST|GET|HEAD|PATCH|PUT|DELETE)$' + 'pattern': r'^(POST|GET|HEAD|PATCH|PUT|DELETE)$', }, 'service': parameter_types.id_string, 'id': parameter_types.id_string, }, - 'additionalProperties': False - } + 'additionalProperties': False, + }, } _application_credential_properties = { 'name': parameter_types.name, 'description': validation.nullable(parameter_types.description), - 'secret': { - 'type': ['null', 'string'] - }, - 'expires_at': { - 'type': ['null', 'string'] - }, + 'secret': {'type': ['null', 'string']}, + 'expires_at': {'type': ['null', 'string']}, 'roles': _role_properties, 'unrestricted': parameter_types.boolean, - 'access_rules': _access_rules_properties + 'access_rules': _access_rules_properties, } application_credential_create = { 'type': 'object', 'properties': _application_credential_properties, 'required': ['name'], - 'additionalProperties': True + 'additionalProperties': True, } diff --git a/keystone/assignment/backends/base.py b/keystone/assignment/backends/base.py index 0cf6146ee0..468be16982 100644 --- a/keystone/assignment/backends/base.py +++ b/keystone/assignment/backends/base.py @@ -48,9 +48,15 @@ class AssignmentDriverBase(object, metaclass=abc.ABCMeta): # assignment/grant crud @abc.abstractmethod - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def create_grant( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): """Create a new assignment/grant. 
If the assignment is to a domain, then optionally it may be @@ -61,16 +67,27 @@ class AssignmentDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def list_grant_role_ids(self, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def list_grant_role_ids( + self, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): """List role ids for assignments/grants.""" raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def check_grant_role_id(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def check_grant_role_id( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): """Check an assignment/grant role id. :raises keystone.exception.RoleAssignmentNotFound: If the role @@ -81,9 +98,15 @@ class AssignmentDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def delete_grant( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): """Delete assignments/grants. 
:raises keystone.exception.RoleAssignmentNotFound: If the role @@ -93,10 +116,15 @@ class AssignmentDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def list_role_assignments(self, role_id=None, - user_id=None, group_ids=None, - domain_id=None, project_ids=None, - inherited_to_projects=None): + def list_role_assignments( + self, + role_id=None, + user_id=None, + group_ids=None, + domain_id=None, + project_ids=None, + inherited_to_projects=None, + ): """Return a list of role assignments for actors on targets. Available parameters represent values in which the returned role @@ -144,8 +172,9 @@ class AssignmentDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() @abc.abstractmethod - def create_system_grant(self, role_id, actor_id, target_id, - assignment_type, inherited): + def create_system_grant( + self, role_id, actor_id, target_id, assignment_type, inherited + ): """Grant a user or group a role on the system. 
:param role_id: the unique ID of the role to grant to the user diff --git a/keystone/assignment/backends/sql.py b/keystone/assignment/backends/sql.py index d887c6bbd0..f58621e2f7 100644 --- a/keystone/assignment/backends/sql.py +++ b/keystone/assignment/backends/sql.py @@ -46,27 +46,42 @@ class Assignment(base.AssignmentDriverBase): def default_role_driver(cls): return 'sql' - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def create_grant( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): assignment_type = AssignmentType.calculate_type( - user_id, group_id, project_id, domain_id) + user_id, group_id, project_id, domain_id + ) try: with sql.session_for_write() as session: - session.add(RoleAssignment( - type=assignment_type, - actor_id=user_id or group_id, - target_id=project_id or domain_id, - role_id=role_id, - inherited=inherited_to_projects)) + session.add( + RoleAssignment( + type=assignment_type, + actor_id=user_id or group_id, + target_id=project_id or domain_id, + role_id=role_id, + inherited=inherited_to_projects, + ) + ) except sql.DBDuplicateEntry: # nosec : The v3 grant APIs are silent if # the assignment already exists pass - def list_grant_role_ids(self, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def list_grant_role_ids( + self, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): with sql.session_for_read() as session: q = session.query(RoleAssignment.role_id) q = q.filter(RoleAssignment.actor_id == (user_id or group_id)) @@ -74,62 +89,104 @@ class Assignment(base.AssignmentDriverBase): q = q.filter(RoleAssignment.inherited == inherited_to_projects) return [x.role_id for x in q.all()] - def _build_grant_filter(self, session, role_id, user_id, group_id, - domain_id, project_id, 
inherited_to_projects): + def _build_grant_filter( + self, + session, + role_id, + user_id, + group_id, + domain_id, + project_id, + inherited_to_projects, + ): q = session.query(RoleAssignment) q = q.filter_by(actor_id=user_id or group_id) if domain_id: q = q.filter_by(target_id=domain_id).filter( - (RoleAssignment.type == AssignmentType.USER_DOMAIN) | - (RoleAssignment.type == AssignmentType.GROUP_DOMAIN)) + (RoleAssignment.type == AssignmentType.USER_DOMAIN) + | (RoleAssignment.type == AssignmentType.GROUP_DOMAIN) + ) else: q = q.filter_by(target_id=project_id).filter( - (RoleAssignment.type == AssignmentType.USER_PROJECT) | - (RoleAssignment.type == AssignmentType.GROUP_PROJECT)) + (RoleAssignment.type == AssignmentType.USER_PROJECT) + | (RoleAssignment.type == AssignmentType.GROUP_PROJECT) + ) q = q.filter_by(role_id=role_id) q = q.filter_by(inherited=inherited_to_projects) return q - def check_grant_role_id(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def check_grant_role_id( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): with sql.session_for_read() as session: try: q = self._build_grant_filter( - session, role_id, user_id, group_id, domain_id, project_id, - inherited_to_projects) + session, + role_id, + user_id, + group_id, + domain_id, + project_id, + inherited_to_projects, + ) q.one() except sql.NotFound: actor_id = user_id or group_id target_id = domain_id or project_id - raise exception.RoleAssignmentNotFound(role_id=role_id, - actor_id=actor_id, - target_id=target_id) + raise exception.RoleAssignmentNotFound( + role_id=role_id, actor_id=actor_id, target_id=target_id + ) - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def delete_grant( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + 
inherited_to_projects=False, + ): with sql.session_for_write() as session: q = self._build_grant_filter( - session, role_id, user_id, group_id, domain_id, project_id, - inherited_to_projects) + session, + role_id, + user_id, + group_id, + domain_id, + project_id, + inherited_to_projects, + ) if not q.delete(False): actor_id = user_id or group_id target_id = domain_id or project_id - raise exception.RoleAssignmentNotFound(role_id=role_id, - actor_id=actor_id, - target_id=target_id) + raise exception.RoleAssignmentNotFound( + role_id=role_id, actor_id=actor_id, target_id=target_id + ) def add_role_to_user_and_project(self, user_id, project_id, role_id): try: with sql.session_for_write() as session: - session.add(RoleAssignment( - type=AssignmentType.USER_PROJECT, - actor_id=user_id, target_id=project_id, - role_id=role_id, inherited=False)) + session.add( + RoleAssignment( + type=AssignmentType.USER_PROJECT, + actor_id=user_id, + target_id=project_id, + role_id=role_id, + inherited=False, + ) + ) except sql.DBDuplicateEntry: - msg = ('User %s already has role %s in tenant %s' - % (user_id, role_id, project_id)) + msg = 'User %s already has role %s in tenant %s' % ( + user_id, + role_id, + project_id, + ) raise exception.Conflict(type='role grant', details=msg) def remove_role_from_user_and_project(self, user_id, project_id, role_id): @@ -139,9 +196,12 @@ class Assignment(base.AssignmentDriverBase): q = q.filter_by(target_id=project_id) q = q.filter_by(role_id=role_id) if q.delete() == 0: - raise exception.RoleNotFound(message=_( - 'Cannot remove role that has not been granted, %s') % - role_id) + raise exception.RoleNotFound( + message=_( + 'Cannot remove role that has not been granted, %s' + ) + % role_id + ) def _get_user_assignment_types(self): return [AssignmentType.USER_PROJECT, AssignmentType.USER_DOMAIN] @@ -195,10 +255,15 @@ class Assignment(base.AssignmentDriverBase): return actor_types or target_types - def list_role_assignments(self, role_id=None, - 
user_id=None, group_ids=None, - domain_id=None, project_ids=None, - inherited_to_projects=None): + def list_role_assignments( + self, + role_id=None, + user_id=None, + group_ids=None, + domain_id=None, + project_ids=None, + inherited_to_projects=None, + ): def denormalize_role(ref): assignment = {} @@ -215,9 +280,10 @@ class Assignment(base.AssignmentDriverBase): assignment['group_id'] = ref.actor_id assignment['domain_id'] = ref.target_id else: - raise exception.Error(message=_( - 'Unexpected assignment type encountered, %s') % - ref.type) + raise exception.Error( + message=_('Unexpected assignment type encountered, %s') + % ref.type + ) assignment['role_id'] = ref.role_id if ref.inherited: assignment['inherited_to_projects'] = 'projects' @@ -225,7 +291,8 @@ class Assignment(base.AssignmentDriverBase): with sql.session_for_read() as session: assignment_types = self._get_assignment_types( - user_id, group_ids, project_ids, domain_id) + user_id, group_ids, project_ids, domain_id + ) targets = None if project_ids: @@ -258,8 +325,9 @@ class Assignment(base.AssignmentDriverBase): with sql.session_for_write() as session: q = session.query(RoleAssignment) q = q.filter_by(target_id=project_id).filter( - RoleAssignment.type.in_((AssignmentType.USER_PROJECT, - AssignmentType.GROUP_PROJECT)) + RoleAssignment.type.in_( + (AssignmentType.USER_PROJECT, AssignmentType.GROUP_PROJECT) + ) ) q.delete(False) @@ -278,16 +346,18 @@ class Assignment(base.AssignmentDriverBase): with sql.session_for_write() as session: q = session.query(RoleAssignment) q = q.filter(RoleAssignment.target_id == domain_id).filter( - (RoleAssignment.type == AssignmentType.USER_DOMAIN) | - (RoleAssignment.type == AssignmentType.GROUP_DOMAIN)) + (RoleAssignment.type == AssignmentType.USER_DOMAIN) + | (RoleAssignment.type == AssignmentType.GROUP_DOMAIN) + ) q.delete(False) def delete_user_assignments(self, user_id): with sql.session_for_write() as session: q = session.query(RoleAssignment) q = 
q.filter_by(actor_id=user_id).filter( - RoleAssignment.type.in_((AssignmentType.USER_PROJECT, - AssignmentType.USER_DOMAIN)) + RoleAssignment.type.in_( + (AssignmentType.USER_PROJECT, AssignmentType.USER_DOMAIN) + ) ) q.delete(False) @@ -295,13 +365,15 @@ class Assignment(base.AssignmentDriverBase): with sql.session_for_write() as session: q = session.query(RoleAssignment) q = q.filter_by(actor_id=group_id).filter( - RoleAssignment.type.in_((AssignmentType.GROUP_PROJECT, - AssignmentType.GROUP_DOMAIN)) + RoleAssignment.type.in_( + (AssignmentType.GROUP_PROJECT, AssignmentType.GROUP_DOMAIN) + ) ) q.delete(False) - def create_system_grant(self, role_id, actor_id, target_id, - assignment_type, inherited): + def create_system_grant( + self, role_id, actor_id, target_id, assignment_type, inherited + ): try: with sql.session_for_write() as session: session.add( @@ -310,7 +382,7 @@ class Assignment(base.AssignmentDriverBase): actor_id=actor_id, target_id=target_id, role_id=role_id, - inherited=inherited + inherited=inherited, ) ) except sql.DBDuplicateEntry: # nosec : The v3 grant APIs are silent if @@ -368,17 +440,23 @@ class RoleAssignment(sql.ModelBase, sql.ModelDictMixin): attributes = ['type', 'actor_id', 'target_id', 'role_id', 'inherited'] # NOTE(henry-nash): Postgres requires a name to be defined for an Enum type = sql.Column( - sql.Enum(AssignmentType.USER_PROJECT, AssignmentType.GROUP_PROJECT, - AssignmentType.USER_DOMAIN, AssignmentType.GROUP_DOMAIN, - name='type'), - nullable=False) + sql.Enum( + AssignmentType.USER_PROJECT, + AssignmentType.GROUP_PROJECT, + AssignmentType.USER_DOMAIN, + AssignmentType.GROUP_DOMAIN, + name='type', + ), + nullable=False, + ) actor_id = sql.Column(sql.String(64), nullable=False) target_id = sql.Column(sql.String(64), nullable=False) role_id = sql.Column(sql.String(64), nullable=False) inherited = sql.Column(sql.Boolean, default=False, nullable=False) __table_args__ = ( - sql.PrimaryKeyConstraint('type', 'actor_id', 'target_id', 
'role_id', - 'inherited'), + sql.PrimaryKeyConstraint( + 'type', 'actor_id', 'target_id', 'role_id', 'inherited' + ), sql.Index('ix_actor_id', 'actor_id'), ) @@ -400,8 +478,9 @@ class SystemRoleAssignment(sql.ModelBase, sql.ModelDictMixin): role_id = sql.Column(sql.String(64), nullable=False) inherited = sql.Column(sql.Boolean, default=False, nullable=False) __table_args__ = ( - sql.PrimaryKeyConstraint('type', 'actor_id', 'target_id', 'role_id', - 'inherited'), + sql.PrimaryKeyConstraint( + 'type', 'actor_id', 'target_id', 'role_id', 'inherited' + ), sql.Index('ix_system_actor_id', 'actor_id'), ) diff --git a/keystone/assignment/core.py b/keystone/assignment/core.py index f986b20cd2..7c0b284774 100644 --- a/keystone/assignment/core.py +++ b/keystone/assignment/core.py @@ -43,8 +43,8 @@ MEMOIZE = cache.get_memoization_decorator(group='role') # any role assignment should invalidate this entire cache region. COMPUTED_ASSIGNMENTS_REGION = cache.create_region(name='computed assignments') MEMOIZE_COMPUTED_ASSIGNMENTS = cache.get_memoization_decorator( - group='role', - region=COMPUTED_ASSIGNMENTS_REGION) + group='role', region=COMPUTED_ASSIGNMENTS_REGION +) @notifications.listener @@ -76,21 +76,25 @@ class Manager(manager.Manager): }, } - def _delete_domain_assignments(self, service, resource_type, operations, - payload): + def _delete_domain_assignments( + self, service, resource_type, operations, payload + ): domain_id = payload['resource_info'] self.driver.delete_domain_assignments(domain_id) def _get_group_ids_for_user_id(self, user_id): # TODO(morganfainberg): Implement a way to get only group_ids # instead of the more expensive to_dict() call for each record. 
- return [x['id'] for - x in PROVIDERS.identity_api.list_groups_for_user(user_id)] + return [ + x['id'] + for x in PROVIDERS.identity_api.list_groups_for_user(user_id) + ] def list_user_ids_for_project(self, project_id): PROVIDERS.resource_api.get_project(project_id) assignment_list = self.list_role_assignments( - project_id=project_id, effective=True) + project_id=project_id, effective=True + ) # Use set() to process the list to remove any duplicates return list(set([x['user_id'] for x in assignment_list])) @@ -105,7 +109,7 @@ class Manager(manager.Manager): if 'user_id' in assignment and 'project_id' in assignment: payload = { 'user_id': assignment['user_id'], - 'project_id': assignment['project_id'] + 'project_id': assignment['project_id'], } notifications.Audit.internal( notifications.REMOVE_APP_CREDS_FOR_USER, payload @@ -126,7 +130,8 @@ class Manager(manager.Manager): """ PROVIDERS.resource_api.get_project(project_id) assignment_list = self.list_role_assignments( - user_id=user_id, project_id=project_id, effective=True) + user_id=user_id, project_id=project_id, effective=True + ) # Use set() to process the list to remove any duplicates return list(set([x['role_id'] for x in assignment_list])) @@ -145,8 +150,11 @@ class Manager(manager.Manager): """ PROVIDERS.resource_api.get_project(project_id) assignment_list = self.list_role_assignments( - user_id=trustor_id, project_id=project_id, effective=True, - strip_domain_roles=False) + user_id=trustor_id, + project_id=project_id, + effective=True, + strip_domain_roles=False, + ) # Use set() to process the list to remove any duplicates return list(set([x['role_id'] for x in assignment_list])) @@ -160,7 +168,8 @@ class Manager(manager.Manager): """ PROVIDERS.resource_api.get_domain(domain_id) assignment_list = self.list_role_assignments( - user_id=user_id, domain_id=domain_id, effective=True) + user_id=user_id, domain_id=domain_id, effective=True + ) # Use set() to process the list to remove any duplicates return 
list(set([x['role_id'] for x in assignment_list])) @@ -174,12 +183,16 @@ class Manager(manager.Manager): if project_id is not None: PROVIDERS.resource_api.get_project(project_id) assignment_list = self.list_role_assignments( - source_from_group_ids=group_ids, project_id=project_id, - effective=True) + source_from_group_ids=group_ids, + project_id=project_id, + effective=True, + ) elif domain_id is not None: assignment_list = self.list_role_assignments( - source_from_group_ids=group_ids, domain_id=domain_id, - effective=True) + source_from_group_ids=group_ids, + domain_id=domain_id, + effective=True, + ) else: raise AttributeError(_("Must specify either domain or project")) @@ -187,11 +200,16 @@ class Manager(manager.Manager): return PROVIDERS.role_api.list_roles_from_ids(role_ids) @notifications.role_assignment('created') - def _add_role_to_user_and_project_adapter(self, role_id, user_id=None, - group_id=None, domain_id=None, - project_id=None, - inherited_to_projects=False, - context=None): + def _add_role_to_user_and_project_adapter( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + context=None, + ): # The parameters for this method must match the parameters for # create_grant so that the notifications.role_assignment decorator @@ -203,7 +221,8 @@ class Manager(manager.Manager): def add_role_to_user_and_project(self, user_id, project_id, role_id): self._add_role_to_user_and_project_adapter( - role_id, user_id=user_id, project_id=project_id) + role_id, user_id=user_id, project_id=project_id + ) COMPUTED_ASSIGNMENTS_REGION.invalidate() # TODO(henry-nash): We might want to consider list limiting this at some @@ -218,10 +237,18 @@ class Manager(manager.Manager): # caching. Please see https://bugs.launchpad.net/keystone/+bug/1700852 # for more details. 
assignment_list = self.list_role_assignments( - user_id=user_id, effective=True) + user_id=user_id, effective=True + ) # Use set() to process the list to remove any duplicates - project_ids = list(set([x['project_id'] for x in assignment_list - if x.get('project_id')])) + project_ids = list( + set( + [ + x['project_id'] + for x in assignment_list + if x.get('project_id') + ] + ) + ) return PROVIDERS.resource_api.list_projects_from_ids(project_ids) # TODO(henry-nash): We might want to consider list limiting this at some @@ -229,44 +256,64 @@ class Manager(manager.Manager): @MEMOIZE_COMPUTED_ASSIGNMENTS def list_domains_for_user(self, user_id): assignment_list = self.list_role_assignments( - user_id=user_id, effective=True) + user_id=user_id, effective=True + ) # Use set() to process the list to remove any duplicates - domain_ids = list(set([x['domain_id'] for x in assignment_list - if x.get('domain_id')])) + domain_ids = list( + set( + [x['domain_id'] for x in assignment_list if x.get('domain_id')] + ) + ) return PROVIDERS.resource_api.list_domains_from_ids(domain_ids) def list_domains_for_groups(self, group_ids): assignment_list = self.list_role_assignments( - source_from_group_ids=group_ids, effective=True) - domain_ids = list(set([x['domain_id'] for x in assignment_list - if x.get('domain_id')])) + source_from_group_ids=group_ids, effective=True + ) + domain_ids = list( + set( + [x['domain_id'] for x in assignment_list if x.get('domain_id')] + ) + ) return PROVIDERS.resource_api.list_domains_from_ids(domain_ids) def list_projects_for_groups(self, group_ids): assignment_list = self.list_role_assignments( - source_from_group_ids=group_ids, effective=True) - project_ids = list(set([x['project_id'] for x in assignment_list - if x.get('project_id')])) + source_from_group_ids=group_ids, effective=True + ) + project_ids = list( + set( + [ + x['project_id'] + for x in assignment_list + if x.get('project_id') + ] + ) + ) return 
PROVIDERS.resource_api.list_projects_from_ids(project_ids) @notifications.role_assignment('deleted') - def _remove_role_from_user_and_project_adapter(self, role_id, user_id=None, - group_id=None, - domain_id=None, - project_id=None, - inherited_to_projects=False, - context=None): + def _remove_role_from_user_and_project_adapter( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + context=None, + ): # The parameters for this method must match the parameters for # delete_grant so that the notifications.role_assignment decorator # will work. - self.driver.remove_role_from_user_and_project(user_id, project_id, - role_id) + self.driver.remove_role_from_user_and_project( + user_id, project_id, role_id + ) payload = {'user_id': user_id, 'project_id': project_id} notifications.Audit.internal( - notifications.REMOVE_APP_CREDS_FOR_USER, - payload + notifications.REMOVE_APP_CREDS_FOR_USER, payload ) self._invalidate_token_cache( role_id, group_id, user_id, project_id, domain_id @@ -274,11 +321,13 @@ class Manager(manager.Manager): def remove_role_from_user_and_project(self, user_id, project_id, role_id): self._remove_role_from_user_and_project_adapter( - role_id, user_id=user_id, project_id=project_id) + role_id, user_id=user_id, project_id=project_id + ) COMPUTED_ASSIGNMENTS_REGION.invalidate() - def _invalidate_token_cache(self, role_id, group_id, user_id, project_id, - domain_id): + def _invalidate_token_cache( + self, role_id, group_id, user_id, project_id, domain_id + ): if group_id: actor_type = 'group' actor_id = group_id @@ -296,18 +345,28 @@ class Manager(manager.Manager): reason = ( 'Invalidating the token cache because role %(role_id)s was ' 'removed from %(actor_type)s %(actor_id)s on %(target_type)s ' - '%(target_id)s.' % - {'role_id': role_id, 'actor_type': actor_type, - 'actor_id': actor_id, 'target_type': target_type, - 'target_id': target_id} + '%(target_id)s.' 
+ % { + 'role_id': role_id, + 'actor_type': actor_type, + 'actor_id': actor_id, + 'target_type': target_type, + 'target_id': target_id, + } ) notifications.invalidate_token_cache_notification(reason) @notifications.role_assignment('created') - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False, - initiator=None): + def create_grant( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + initiator=None, + ): role = PROVIDERS.role_api.get_role(role_id) if domain_id: PROVIDERS.resource_api.get_domain(domain_id) @@ -318,47 +377,75 @@ class Manager(manager.Manager): # and role must match if role['domain_id'] and project['domain_id'] != role['domain_id']: raise exception.DomainSpecificRoleMismatch( - role_id=role_id, - project_id=project_id) + role_id=role_id, project_id=project_id + ) self.driver.create_grant( - role_id, user_id=user_id, group_id=group_id, domain_id=domain_id, - project_id=project_id, inherited_to_projects=inherited_to_projects + role_id, + user_id=user_id, + group_id=group_id, + domain_id=domain_id, + project_id=project_id, + inherited_to_projects=inherited_to_projects, ) COMPUTED_ASSIGNMENTS_REGION.invalidate() - def get_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def get_grant( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): role_ref = PROVIDERS.role_api.get_role(role_id) if domain_id: PROVIDERS.resource_api.get_domain(domain_id) if project_id: PROVIDERS.resource_api.get_project(project_id) self.check_grant_role_id( - role_id, user_id=user_id, group_id=group_id, domain_id=domain_id, - project_id=project_id, inherited_to_projects=inherited_to_projects + role_id, + user_id=user_id, + group_id=group_id, + domain_id=domain_id, + project_id=project_id, + 
inherited_to_projects=inherited_to_projects, ) return role_ref - def list_grants(self, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): + def list_grants( + self, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + ): if domain_id: PROVIDERS.resource_api.get_domain(domain_id) if project_id: PROVIDERS.resource_api.get_project(project_id) grant_ids = self.list_grant_role_ids( - user_id=user_id, group_id=group_id, domain_id=domain_id, - project_id=project_id, inherited_to_projects=inherited_to_projects + user_id=user_id, + group_id=group_id, + domain_id=domain_id, + project_id=project_id, + inherited_to_projects=inherited_to_projects, ) return PROVIDERS.role_api.list_roles_from_ids(grant_ids) @notifications.role_assignment('deleted') - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False, - initiator=None): + def delete_grant( + self, + role_id, + user_id=None, + group_id=None, + domain_id=None, + project_id=None, + inherited_to_projects=False, + initiator=None, + ): # check if role exist before any processing PROVIDERS.role_api.get_role(role_id) @@ -366,9 +453,12 @@ class Manager(manager.Manager): if group_id is None: # check if role exists on the user before revoke self.check_grant_role_id( - role_id, user_id=user_id, group_id=None, domain_id=domain_id, + role_id, + user_id=user_id, + group_id=None, + domain_id=domain_id, project_id=project_id, - inherited_to_projects=inherited_to_projects + inherited_to_projects=inherited_to_projects, ) self._invalidate_token_cache( role_id, group_id, user_id, project_id, domain_id @@ -377,25 +467,33 @@ class Manager(manager.Manager): try: # check if role exists on the group before revoke self.check_grant_role_id( - role_id, user_id=None, group_id=group_id, - domain_id=domain_id, project_id=project_id, - inherited_to_projects=inherited_to_projects + role_id, + 
user_id=None, + group_id=group_id, + domain_id=domain_id, + project_id=project_id, + inherited_to_projects=inherited_to_projects, ) if CONF.token.revoke_by_id: self._invalidate_token_cache( role_id, group_id, user_id, project_id, domain_id ) except exception.GroupNotFound: - LOG.debug('Group %s not found, no tokens to invalidate.', - group_id) + LOG.debug( + 'Group %s not found, no tokens to invalidate.', group_id + ) if domain_id: PROVIDERS.resource_api.get_domain(domain_id) if project_id: PROVIDERS.resource_api.get_project(project_id) self.driver.delete_grant( - role_id, user_id=user_id, group_id=group_id, domain_id=domain_id, - project_id=project_id, inherited_to_projects=inherited_to_projects + role_id, + user_id=user_id, + group_id=group_id, + domain_id=domain_id, + project_id=project_id, + inherited_to_projects=inherited_to_projects, ) COMPUTED_ASSIGNMENTS_REGION.invalidate() @@ -406,8 +504,14 @@ class Manager(manager.Manager): # kept as it is in order to detect unnecessarily complex code, which is not # this case. - def _expand_indirect_assignment(self, ref, user_id=None, project_id=None, - subtree_ids=None, expand_groups=True): + def _expand_indirect_assignment( + self, + ref, + user_id=None, + project_id=None, + subtree_ids=None, + expand_groups=True, + ): """Return a list of expanded role assignments. This methods is called for each discovered assignment that either needs @@ -429,6 +533,7 @@ class Manager(manager.Manager): assignments, one for each member of that group. """ + def create_group_assignment(base_ref, user_id): """Create a group assignment from the provided ref.""" ref = copy.deepcopy(base_ref) @@ -483,17 +588,23 @@ class Manager(manager.Manager): # as empty list. 
try: users = PROVIDERS.identity_api.list_users_in_group( - ref['group_id']) + ref['group_id'] + ) except exception.GroupNotFound: - LOG.warning('Group %(group)s was not found but still has role ' - 'assignments.', {'group': ref['group_id']}) + LOG.warning( + 'Group %(group)s was not found but still has role ' + 'assignments.', + {'group': ref['group_id']}, + ) users = [] - return [create_group_assignment(ref, user_id=m['id']) - for m in users] + return [ + create_group_assignment(ref, user_id=m['id']) for m in users + ] - def expand_inherited_assignment(ref, user_id, project_id, subtree_ids, - expand_groups): + def expand_inherited_assignment( + ref, user_id, project_id, subtree_ids, expand_groups + ): """Expand inherited role assignments. If expand_groups is True and this is a group role assignment on a @@ -543,6 +654,7 @@ class Manager(manager.Manager): main body of the dict and 'parent_id' in the 'indirect' subdict. """ + def create_inherited_assignment(base_ref, project_id): """Create a project assignment from the provided ref. 
@@ -581,23 +693,29 @@ class Manager(manager.Manager): resource_api = PROVIDERS.resource_api if ref.get('project_id'): if ref['project_id'] in project_ids: - project_ids = ( - [x['id'] for x in - resource_api.list_projects_in_subtree( - ref['project_id'])]) + project_ids = [ + x['id'] + for x in resource_api.list_projects_in_subtree( + ref['project_id'] + ) + ] elif ref.get('domain_id'): # A domain inherited assignment, so apply it to all projects # in this domain - project_ids = ( - [x['id'] for x in - PROVIDERS.resource_api.list_projects_in_domain( - ref['domain_id'])]) + project_ids = [ + x['id'] + for x in PROVIDERS.resource_api.list_projects_in_domain( + ref['domain_id'] + ) + ] else: # It must be a project assignment, so apply it to its subtree - project_ids = ( - [x['id'] for x in - PROVIDERS.resource_api.list_projects_in_subtree( - ref['project_id'])]) + project_ids = [ + x['id'] + for x in PROVIDERS.resource_api.list_projects_in_subtree( + ref['project_id'] + ) + ] new_refs = [] if 'group_id' in ref: @@ -605,23 +723,30 @@ class Manager(manager.Manager): # Expand role assignment to all group members on any # inherited target of any of the projects for ref in expand_group_assignment(ref, user_id): - new_refs += [create_inherited_assignment(ref, proj_id) - for proj_id in project_ids] + new_refs += [ + create_inherited_assignment(ref, proj_id) + for proj_id in project_ids + ] else: # Just place the group assignment on any inherited target # of any of the projects - new_refs += [create_inherited_assignment(ref, proj_id) - for proj_id in project_ids] + new_refs += [ + create_inherited_assignment(ref, proj_id) + for proj_id in project_ids + ] else: # Expand role assignment for all projects - new_refs += [create_inherited_assignment(ref, proj_id) - for proj_id in project_ids] + new_refs += [ + create_inherited_assignment(ref, proj_id) + for proj_id in project_ids + ] return new_refs if ref.get('inherited_to_projects') == 'projects': return 
expand_inherited_assignment( - ref, user_id, project_id, subtree_ids, expand_groups) + ref, user_id, project_id, subtree_ids, expand_groups + ) elif 'group_id' in ref and expand_groups: return expand_group_assignment(ref, user_id) return [ref] @@ -637,6 +762,7 @@ class Manager(manager.Manager): caller can determine where the assignment came from. """ + def _make_implied_ref_copy(prior_ref, implied_role_id): # Create a ref for an implied role from the ref of a prior role, # setting the new role_id to be the implied role and the indirect @@ -659,13 +785,14 @@ class Manager(manager.Manager): if next_role_id in implied_roles_cache: implied_roles = implied_roles_cache[next_role_id] else: - implied_roles = ( - PROVIDERS.role_api.list_implied_roles(next_role_id)) + implied_roles = PROVIDERS.role_api.list_implied_roles( + next_role_id + ) implied_roles_cache[next_role_id] = implied_roles for implied_role in implied_roles: - implied_ref = ( - _make_implied_ref_copy( - next_ref, implied_role['implied_role_id'])) + implied_ref = _make_implied_ref_copy( + next_ref, implied_role['implied_role_id'] + ) if implied_ref in checked_role_refs: # Avoid traversing a cycle continue @@ -693,9 +820,10 @@ class Manager(manager.Manager): remove any assignments that include a domain role. """ + def _role_is_global(role_id): ref = PROVIDERS.role_api.get_role(role_id) - return (ref['domain_id'] is None) + return ref['domain_id'] is None filter_results = [] for ref in role_refs: @@ -703,10 +831,18 @@ class Manager(manager.Manager): filter_results.append(ref) return filter_results - def _list_effective_role_assignments(self, role_id, user_id, group_id, - domain_id, project_id, subtree_ids, - inherited, source_from_group_ids, - strip_domain_roles): + def _list_effective_role_assignments( + self, + role_id, + user_id, + group_id, + domain_id, + project_id, + subtree_ids, + inherited, + source_from_group_ids, + strip_domain_roles, + ): """List role assignments in effective mode. 
When using effective mode, besides the direct assignments, the indirect @@ -725,9 +861,16 @@ class Manager(manager.Manager): specified, hence avoiding retrieving a huge list. """ + def list_role_assignments_for_actor( - role_id, inherited, user_id=None, group_ids=None, - project_id=None, subtree_ids=None, domain_id=None): + role_id, + inherited, + user_id=None, + group_ids=None, + project_id=None, + subtree_ids=None, + domain_id=None, + ): """List role assignments for actor on target. List direct and indirect assignments for an actor, optionally @@ -774,9 +917,13 @@ class Manager(manager.Manager): if inherited is False or inherited is None: # Get non inherited assignments non_inherited_refs = self.driver.list_role_assignments( - role_id=role_id, domain_id=domain_id, - project_ids=project_ids_of_interest, user_id=user_id, - group_ids=group_ids, inherited_to_projects=False) + role_id=role_id, + domain_id=domain_id, + project_ids=project_ids_of_interest, + user_id=user_id, + group_ids=group_ids, + inherited_to_projects=False, + ) inherited_refs = [] if inherited is True or inherited is None: @@ -790,31 +937,44 @@ class Manager(manager.Manager): # List inherited assignments from the project's domain proj_domain_id = PROVIDERS.resource_api.get_project( - project_id)['domain_id'] + project_id + )['domain_id'] inherited_refs += self.driver.list_role_assignments( - role_id=role_id, domain_id=proj_domain_id, - user_id=user_id, group_ids=group_ids, - inherited_to_projects=True) + role_id=role_id, + domain_id=proj_domain_id, + user_id=user_id, + group_ids=group_ids, + inherited_to_projects=True, + ) # For inherited assignments from projects, since we know # they are from the same tree the only places these can # come from are from parents of the main project or # inherited assignments on the project or subtree itself. 
- source_ids = [project['id'] for project in - PROVIDERS.resource_api.list_project_parents( - project_id)] + source_ids = [ + project['id'] + for project in PROVIDERS.resource_api.list_project_parents( + project_id + ) + ] if subtree_ids: source_ids += project_ids_of_interest if source_ids: inherited_refs += self.driver.list_role_assignments( - role_id=role_id, project_ids=source_ids, - user_id=user_id, group_ids=group_ids, - inherited_to_projects=True) + role_id=role_id, + project_ids=source_ids, + user_id=user_id, + group_ids=group_ids, + inherited_to_projects=True, + ) else: # List inherited assignments without filtering by target inherited_refs = self.driver.list_role_assignments( - role_id=role_id, user_id=user_id, group_ids=group_ids, - inherited_to_projects=True) + role_id=role_id, + user_id=user_id, + group_ids=group_ids, + inherited_to_projects=True, + ) return non_inherited_refs + inherited_refs @@ -826,8 +986,10 @@ class Manager(manager.Manager): if user_id and source_from_group_ids: # You can't do both - and since source_from_group_ids is only used # internally, this must be a coding error by the caller. - msg = _('Cannot list assignments sourced from groups and filtered ' - 'by user ID.') + msg = _( + 'Cannot list assignments sourced from groups and filtered ' + 'by user ID.' + ) raise exception.UnexpectedError(msg) # If filtering by domain, then only non-inherited assignments are @@ -839,9 +1001,14 @@ class Manager(manager.Manager): # filtering by role_id and instead return the whole set of roles. # Matching on the specified role is performed at the end. 
direct_refs = list_role_assignments_for_actor( - role_id=None, user_id=user_id, group_ids=source_from_group_ids, - project_id=project_id, subtree_ids=subtree_ids, - domain_id=domain_id, inherited=inherited) + role_id=None, + user_id=user_id, + group_ids=source_from_group_ids, + project_id=project_id, + subtree_ids=subtree_ids, + domain_id=domain_id, + inherited=inherited, + ) # And those from the user's groups, so long as we are not restricting # to a set of source groups (in which case we already got those @@ -851,16 +1018,21 @@ class Manager(manager.Manager): group_ids = self._get_group_ids_for_user_id(user_id) if group_ids: group_refs = list_role_assignments_for_actor( - role_id=None, project_id=project_id, - subtree_ids=subtree_ids, group_ids=group_ids, - domain_id=domain_id, inherited=inherited) + role_id=None, + project_id=project_id, + subtree_ids=subtree_ids, + group_ids=group_ids, + domain_id=domain_id, + inherited=inherited, + ) # Expand grouping and inheritance on retrieved role assignments refs = [] - expand_groups = (source_from_group_ids is None) - for ref in (direct_refs + group_refs): + expand_groups = source_from_group_ids is None + for ref in direct_refs + group_refs: refs += self._expand_indirect_assignment( - ref, user_id, project_id, subtree_ids, expand_groups) + ref, user_id, project_id, subtree_ids, expand_groups + ) refs = self.add_implied_roles(refs) if strip_domain_roles: @@ -870,9 +1042,17 @@ class Manager(manager.Manager): return refs - def _list_direct_role_assignments(self, role_id, user_id, group_id, system, - domain_id, project_id, subtree_ids, - inherited): + def _list_direct_role_assignments( + self, + role_id, + user_id, + group_id, + system, + domain_id, + project_id, + subtree_ids, + inherited, + ): """List role assignments without applying expansion. 
Returns a list of direct role assignments, where their attributes match @@ -891,9 +1071,13 @@ class Manager(manager.Manager): project_and_domain_assignments = [] if not system: project_and_domain_assignments = self.driver.list_role_assignments( - role_id=role_id, user_id=user_id, group_ids=group_ids, - domain_id=domain_id, project_ids=project_ids_of_interest, - inherited_to_projects=inherited) + role_id=role_id, + user_id=user_id, + group_ids=group_ids, + domain_id=domain_id, + project_ids=project_ids_of_interest, + inherited_to_projects=inherited, + ) system_assignments = [] if system or (not project_id and not domain_id and not system): @@ -901,17 +1085,21 @@ class Manager(manager.Manager): assignments = self.list_system_grants_for_user(user_id) for assignment in assignments: system_assignments.append( - {'system': {'all': True}, - 'user_id': user_id, - 'role_id': assignment['id']} + { + 'system': {'all': True}, + 'user_id': user_id, + 'role_id': assignment['id'], + } ) elif group_id: assignments = self.list_system_grants_for_group(group_id) for assignment in assignments: system_assignments.append( - {'system': {'all': True}, - 'group_id': group_id, - 'role_id': assignment['id']} + { + 'system': {'all': True}, + 'group_id': group_id, + 'role_id': assignment['id'], + } ) else: assignments = self.list_all_system_grants() @@ -927,24 +1115,33 @@ class Manager(manager.Manager): if role_id: system_assignments = [ - sa for sa in system_assignments - if role_id == sa['role_id'] + sa for sa in system_assignments if role_id == sa['role_id'] ] assignments = [] for assignment in itertools.chain( - project_and_domain_assignments, system_assignments): + project_and_domain_assignments, system_assignments + ): assignments.append(assignment) return assignments @MEMOIZE_COMPUTED_ASSIGNMENTS - def list_role_assignments(self, role_id=None, user_id=None, group_id=None, - system=None, domain_id=None, project_id=None, - include_subtree=False, inherited=None, - effective=None, 
include_names=False, - source_from_group_ids=None, - strip_domain_roles=True): + def list_role_assignments( + self, + role_id=None, + user_id=None, + group_id=None, + system=None, + domain_id=None, + project_id=None, + include_subtree=False, + inherited=None, + effective=None, + include_names=False, + source_from_group_ids=None, + strip_domain_roles=True, + ): """List role assignments, honoring effective mode and provided filters. Returns a list of role assignments, where their attributes match the @@ -984,23 +1181,39 @@ class Manager(manager.Manager): """ subtree_ids = None if project_id and include_subtree: - subtree_ids = ( - [x['id'] for x in - PROVIDERS.resource_api.list_projects_in_subtree( - project_id)]) + subtree_ids = [ + x['id'] + for x in PROVIDERS.resource_api.list_projects_in_subtree( + project_id + ) + ] if system != 'all': system = None if effective: role_assignments = self._list_effective_role_assignments( - role_id, user_id, group_id, domain_id, project_id, - subtree_ids, inherited, source_from_group_ids, - strip_domain_roles) + role_id, + user_id, + group_id, + domain_id, + project_id, + subtree_ids, + inherited, + source_from_group_ids, + strip_domain_roles, + ) else: role_assignments = self._list_direct_role_assignments( - role_id, user_id, group_id, system, domain_id, project_id, - subtree_ids, inherited) + role_id, + user_id, + group_id, + system, + domain_id, + project_id, + subtree_ids, + inherited, + ) if include_names: return self._get_names_from_role_assignments(role_assignments) @@ -1022,8 +1235,10 @@ class Manager(manager.Manager): # use empty values. _user = PROVIDERS.identity_api.get_user(value) except exception.UserNotFound: - msg = ('User %(user)s not found in the' - ' backend but still has role assignments.') + msg = ( + 'User %(user)s not found in the' + ' backend but still has role assignments.' 
+ ) LOG.warning(msg, {'user': value}) new_assign['user_name'] = '' new_assign['user_domain_id'] = '' @@ -1033,7 +1248,9 @@ class Manager(manager.Manager): new_assign['user_domain_id'] = _user['domain_id'] new_assign['user_domain_name'] = ( PROVIDERS.resource_api.get_domain( - _user['domain_id'])['name']) + _user['domain_id'] + )['name'] + ) elif key == 'group_id': try: # Note(knikolla): Try to get the group, otherwise @@ -1041,8 +1258,10 @@ class Manager(manager.Manager): # use empty values. _group = PROVIDERS.identity_api.get_group(value) except exception.GroupNotFound: - msg = ('Group %(group)s not found in the' - ' backend but still has role assignments.') + msg = ( + 'Group %(group)s not found in the' + ' backend but still has role assignments.' + ) LOG.warning(msg, {'group': value}) new_assign['group_name'] = '' new_assign['group_domain_id'] = '' @@ -1052,14 +1271,18 @@ class Manager(manager.Manager): new_assign['group_domain_id'] = _group['domain_id'] new_assign['group_domain_name'] = ( PROVIDERS.resource_api.get_domain( - _group['domain_id'])['name']) + _group['domain_id'] + )['name'] + ) elif key == 'project_id': _project = PROVIDERS.resource_api.get_project(value) new_assign['project_name'] = _project['name'] new_assign['project_domain_id'] = _project['domain_id'] new_assign['project_domain_name'] = ( PROVIDERS.resource_api.get_domain( - _project['domain_id'])['name']) + _project['domain_id'] + )['name'] + ) elif key == 'role_id': _role = PROVIDERS.role_api.get_role(value) new_assign['role_name'] = _role['name'] @@ -1067,7 +1290,9 @@ class Manager(manager.Manager): new_assign['role_domain_id'] = _role['domain_id'] new_assign['role_domain_name'] = ( PROVIDERS.resource_api.get_domain( - _role['domain_id'])['name']) + _role['domain_id'] + )['name'] + ) role_assign_list.append(new_assign) return role_assign_list @@ -1141,9 +1366,8 @@ class Manager(manager.Manager): if role.get('domain_id'): raise exception.ValidationError( 'Role %(role_id)s is a 
domain-specific role. Unable to use ' - 'a domain-specific role in a system assignment.' % { - 'role_id': role_id - } + 'a domain-specific role in a system assignment.' + % {'role_id': role_id} ) target_id = self._SYSTEM_SCOPE_TOKEN assignment_type = self._USER_SYSTEM @@ -1214,9 +1438,8 @@ class Manager(manager.Manager): if role.get('domain_id'): raise exception.ValidationError( 'Role %(role_id)s is a domain-specific role. Unable to use ' - 'a domain-specific role in a system assignment.' % { - 'role_id': role_id - } + 'a domain-specific role in a system assignment.' + % {'role_id': role_id} ) target_id = self._SYSTEM_SCOPE_TOKEN assignment_type = self._GROUP_SYSTEM @@ -1270,8 +1493,8 @@ class RoleManager(manager.Manager): # Explicitly load the assignment manager object assignment_driver = CONF.assignment.driver assignment_manager_obj = manager.load_driver( - Manager.driver_namespace, - assignment_driver) + Manager.driver_namespace, assignment_driver + ) role_driver = assignment_manager_obj.default_role_driver() super(RoleManager, self).__init__(role_driver) @@ -1292,8 +1515,7 @@ class RoleManager(manager.Manager): elif len(found_roles) == 1: return {'id': found_roles[0]['id']} else: - raise exception.AmbiguityError(resource='role', - name=role_name) + raise exception.AmbiguityError(resource='role', name=role_name) def create_role(self, role_id, role, initiator=None): # Shallow copy to help mitigate in-line changes that might impact @@ -1321,12 +1543,16 @@ class RoleManager(manager.Manager): original_resource_ref=original_role, new_resource_ref=role, type='role', - resource_id=role_id) + resource_id=role_id, + ) - if ('domain_id' in role and - role['domain_id'] != original_role['domain_id']): + if ( + 'domain_id' in role + and role['domain_id'] != original_role['domain_id'] + ): raise exception.ValidationError( - message=_('Update of `domain_id` is not allowed.')) + message=_('Update of `domain_id` is not allowed.') + ) ret = self.driver.update_role(role_id, role) 
notifications.Audit.updated(self._ROLE, role_id, initiator) @@ -1336,9 +1562,9 @@ class RoleManager(manager.Manager): def delete_role(self, role_id, initiator=None): role = self.driver.get_role(role_id) # Prevent deletion of immutable roles. - ro_opt.check_immutable_delete(resource_ref=role, - resource_type='role', - resource_id=role_id) + ro_opt.check_immutable_delete( + resource_ref=role, resource_type='role', resource_id=role_id + ) PROVIDERS.assignment_api._send_app_cred_notification_for_role_removal( role_id ) @@ -1363,10 +1589,10 @@ class RoleManager(manager.Manager): raise exception.InvalidImpliedRole(role_id=implied_role_id) if prior_role['domain_id'] is None and implied_role['domain_id']: msg = _('Global role cannot imply a domain-specific role') - raise exception.InvalidImpliedRole(msg, - role_id=implied_role_id) + raise exception.InvalidImpliedRole(msg, role_id=implied_role_id) response = self.driver.create_implied_role( - prior_role_id, implied_role_id) + prior_role_id, implied_role_id + ) COMPUTED_ASSIGNMENTS_REGION.invalidate() return response diff --git a/keystone/assignment/role_backends/sql.py b/keystone/assignment/role_backends/sql.py index ac8ac0fdce..c050bf3aa3 100644 --- a/keystone/assignment/role_backends/sql.py +++ b/keystone/assignment/role_backends/sql.py @@ -41,7 +41,7 @@ class Role(base.RoleDriverBase): # filter_limit_query() below, which will remove the filter from the # hints (hence ensuring our substitution is not exposed to the caller). for f in hints.filters: - if (f['name'] == 'domain_id' and f['value'] is None): + if f['name'] == 'domain_id' and f['value'] is None: f['value'] = base.NULL_DOMAIN_ID with sql.session_for_read() as session: @@ -85,11 +85,15 @@ class Role(base.RoleDriverBase): # Move the "_resource_options" attribute over to the real ref # so that resource_options.resource_options_ref_to_mapper can # handle the work. 
- setattr(ref, '_resource_options', - getattr(new_role, '_resource_options', {})) + setattr( + ref, + '_resource_options', + getattr(new_role, '_resource_options', {}), + ) # Move options into the propper attribute mapper construct resource_options.resource_options_ref_to_mapper( - ref, sql_model.RoleOption) + ref, sql_model.RoleOption + ) return ref.to_dict() def delete_role(self, role_id): @@ -98,22 +102,28 @@ class Role(base.RoleDriverBase): session.delete(ref) def _get_implied_role(self, session, prior_role_id, implied_role_id): - query = session.query(sql_model.ImpliedRoleTable).filter( - sql_model.ImpliedRoleTable.prior_role_id == prior_role_id).filter( - sql_model.ImpliedRoleTable.implied_role_id == implied_role_id) + query = ( + session.query(sql_model.ImpliedRoleTable) + .filter(sql_model.ImpliedRoleTable.prior_role_id == prior_role_id) + .filter( + sql_model.ImpliedRoleTable.implied_role_id == implied_role_id + ) + ) try: ref = query.one() except sql.NotFound: raise exception.ImpliedRoleNotFound( - prior_role_id=prior_role_id, - implied_role_id=implied_role_id) + prior_role_id=prior_role_id, implied_role_id=implied_role_id + ) return ref @sql.handle_conflicts(conflict_type='implied_role') def create_implied_role(self, prior_role_id, implied_role_id): with sql.session_for_write() as session: - inference = {'prior_role_id': prior_role_id, - 'implied_role_id': implied_role_id} + inference = { + 'prior_role_id': prior_role_id, + 'implied_role_id': implied_role_id, + } ref = sql_model.ImpliedRoleTable.from_dict(inference) try: session.add(ref) @@ -126,15 +136,16 @@ class Role(base.RoleDriverBase): def delete_implied_role(self, prior_role_id, implied_role_id): with sql.session_for_write() as session: - ref = self._get_implied_role(session, prior_role_id, - implied_role_id) + ref = self._get_implied_role( + session, prior_role_id, implied_role_id + ) session.delete(ref) def list_implied_roles(self, prior_role_id): with sql.session_for_read() as session: - query 
= session.query( - sql_model.ImpliedRoleTable).filter( - sql_model.ImpliedRoleTable.prior_role_id == prior_role_id) + query = session.query(sql_model.ImpliedRoleTable).filter( + sql_model.ImpliedRoleTable.prior_role_id == prior_role_id + ) refs = query.all() return [ref.to_dict() for ref in refs] @@ -146,6 +157,7 @@ class Role(base.RoleDriverBase): def get_implied_role(self, prior_role_id, implied_role_id): with sql.session_for_read() as session: - ref = self._get_implied_role(session, prior_role_id, - implied_role_id) + ref = self._get_implied_role( + session, prior_role_id, implied_role_id + ) return ref.to_dict() diff --git a/keystone/assignment/role_backends/sql_model.py b/keystone/assignment/role_backends/sql_model.py index 624541281e..2ce78dbe6b 100644 --- a/keystone/assignment/role_backends/sql_model.py +++ b/keystone/assignment/role_backends/sql_model.py @@ -22,7 +22,8 @@ class RoleTable(sql.ModelBase, sql.ModelDictMixinWithExtras): def to_dict(self, include_extra_dict=False): d = super(RoleTable, self).to_dict( - include_extra_dict=include_extra_dict) + include_extra_dict=include_extra_dict + ) if d['domain_id'] == base.NULL_DOMAIN_ID: d['domain_id'] = None # NOTE(notmorgan): Eventually it may make sense to drop the empty @@ -56,8 +57,9 @@ class RoleTable(sql.ModelBase, sql.ModelDictMixinWithExtras): resource_options_registry = ro.ROLE_OPTIONS_REGISTRY id = sql.Column(sql.String(64), primary_key=True) name = sql.Column(sql.String(255), nullable=False) - domain_id = sql.Column(sql.String(64), nullable=False, - server_default=base.NULL_DOMAIN_ID) + domain_id = sql.Column( + sql.String(64), nullable=False, server_default=base.NULL_DOMAIN_ID + ) description = sql.Column(sql.String(255), nullable=True) extra = sql.Column(sql.JsonBlob()) _resource_option_mapper = orm.relationship( @@ -66,7 +68,7 @@ class RoleTable(sql.ModelBase, sql.ModelDictMixinWithExtras): cascade='all,delete,delete-orphan', lazy='subquery', backref='role', - 
collection_class=collections.attribute_mapped_collection('option_id') + collection_class=collections.attribute_mapped_collection('option_id'), ) __table_args__ = (sql.UniqueConstraint('name', 'domain_id'),) @@ -77,11 +79,13 @@ class ImpliedRoleTable(sql.ModelBase, sql.ModelDictMixin): prior_role_id = sql.Column( sql.String(64), sql.ForeignKey('role.id', ondelete="CASCADE"), - primary_key=True) + primary_key=True, + ) implied_role_id = sql.Column( sql.String(64), sql.ForeignKey('role.id', ondelete="CASCADE"), - primary_key=True) + primary_key=True, + ) @classmethod def from_dict(cls, dictionary): @@ -102,11 +106,13 @@ class ImpliedRoleTable(sql.ModelBase, sql.ModelDictMixin): class RoleOption(sql.ModelBase): __tablename__ = 'role_option' - role_id = sql.Column(sql.String(64), - sql.ForeignKey('role.id', ondelete='CASCADE'), - nullable=False, primary_key=True) - option_id = sql.Column(sql.String(4), nullable=False, - primary_key=True) + role_id = sql.Column( + sql.String(64), + sql.ForeignKey('role.id', ondelete='CASCADE'), + nullable=False, + primary_key=True, + ) + option_id = sql.Column(sql.String(4), nullable=False, primary_key=True) option_value = sql.Column(sql.JsonBlob, nullable=True) def __init__(self, option_id, option_value): diff --git a/keystone/assignment/schema.py b/keystone/assignment/schema.py index 92b2f68282..f5d484ff5d 100644 --- a/keystone/assignment/schema.py +++ b/keystone/assignment/schema.py @@ -18,19 +18,19 @@ from keystone.common.validation import parameter_types _role_properties = { 'name': parameter_types.name, 'description': parameter_types.description, - 'options': ro.ROLE_OPTIONS_REGISTRY.json_schema + 'options': ro.ROLE_OPTIONS_REGISTRY.json_schema, } role_create = { 'type': 'object', 'properties': _role_properties, 'required': ['name'], - 'additionalProperties': True + 'additionalProperties': True, } role_update = { 'type': 'object', 'properties': _role_properties, 'minProperties': 1, - 'additionalProperties': True + 
'additionalProperties': True, } diff --git a/keystone/auth/core.py b/keystone/auth/core.py index 7b40a108a3..fe025fe7d1 100644 --- a/keystone/auth/core.py +++ b/keystone/auth/core.py @@ -75,9 +75,9 @@ class AuthContext(dict): """ # identity attributes need to be reconciled among the auth plugins - IDENTITY_ATTRIBUTES = frozenset(['user_id', 'project_id', - 'access_token_id', 'domain_id', - 'expires_at']) + IDENTITY_ATTRIBUTES = frozenset( + ['user_id', 'project_id', 'access_token_id', 'domain_id', 'expires_at'] + ) def __setitem__(self, key, val): """Override __setitem__ to prevent conflicting values.""" @@ -87,20 +87,21 @@ class AuthContext(dict): # special treatment for 'expires_at', we are going to take # the earliest expiration instead. if existing_val != val: - LOG.info('"expires_at" has conflicting values ' - '%(existing)s and %(new)s. Will use the ' - 'earliest value.', - {'existing': existing_val, 'new': val}) + LOG.info( + '"expires_at" has conflicting values ' + '%(existing)s and %(new)s. 
Will use the ' + 'earliest value.', + {'existing': existing_val, 'new': val}, + ) if existing_val is None or val is None: val = existing_val or val else: val = min(existing_val, val) elif existing_val != val: - msg = _('Unable to reconcile identity attribute %(attribute)s ' - 'as it has conflicting values %(new)s and %(old)s') % ( - {'attribute': key, - 'new': val, - 'old': existing_val}) + msg = _( + 'Unable to reconcile identity attribute %(attribute)s ' + 'as it has conflicting values %(new)s and %(old)s' + ) % ({'attribute': key, 'new': val, 'old': existing_val}) raise exception.Unauthorized(msg) return super(AuthContext, self).__setitem__(key, val) @@ -141,8 +142,8 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): # ensure the project is enabled try: PROVIDERS.resource_api.assert_project_enabled( - project_id=project_ref['id'], - project=project_ref) + project_id=project_ref['id'], project=project_ref + ) except AssertionError as e: LOG.warning(e) raise exception.Unauthorized from e @@ -150,8 +151,8 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): def _assert_domain_is_enabled(self, domain_ref): try: PROVIDERS.resource_api.assert_domain_enabled( - domain_id=domain_ref['id'], - domain=domain_ref) + domain_id=domain_ref['id'], domain=domain_ref + ) except AssertionError as e: LOG.warning(e) raise exception.Unauthorized from e @@ -161,15 +162,19 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): domain_name = domain_info.get('name') try: if domain_name: - if (CONF.resource.domain_name_url_safe == 'strict' and - utils.is_not_url_safe(domain_name)): + if ( + CONF.resource.domain_name_url_safe == 'strict' + and utils.is_not_url_safe(domain_name) + ): msg = 'Domain name cannot contain reserved characters.' - tr_msg = _('Domain name cannot contain reserved ' - 'characters.') + tr_msg = _( + 'Domain name cannot contain reserved ' 'characters.' 
+ ) LOG.warning(msg) raise exception.Unauthorized(message=tr_msg) domain_ref = PROVIDERS.resource_api.get_domain_by_name( - domain_name) + domain_name + ) else: domain_ref = PROVIDERS.resource_api.get_domain(domain_id) except exception.DomainNotFound as e: @@ -183,19 +188,24 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): project_name = project_info.get('name') try: if project_name: - if (CONF.resource.project_name_url_safe == 'strict' and - utils.is_not_url_safe(project_name)): + if ( + CONF.resource.project_name_url_safe == 'strict' + and utils.is_not_url_safe(project_name) + ): msg = 'Project name cannot contain reserved characters.' - tr_msg = _('Project name cannot contain reserved ' - 'characters.') + tr_msg = _( + 'Project name cannot contain reserved ' 'characters.' + ) LOG.warning(msg) raise exception.Unauthorized(message=tr_msg) if 'domain' not in project_info: - raise exception.ValidationError(attribute='domain', - target='project') + raise exception.ValidationError( + attribute='domain', target='project' + ) domain_ref = self._lookup_domain(project_info['domain']) project_ref = PROVIDERS.resource_api.get_project_by_name( - project_name, domain_ref['id']) + project_name, domain_ref['id'] + ) else: project_ref = PROVIDERS.resource_api.get_project(project_id) domain_id = project_ref['domain_id'] @@ -214,8 +224,9 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): def _lookup_trust(self, trust_info): trust_id = trust_info.get('id') if not trust_id: - raise exception.ValidationError(attribute='trust_id', - target='trust') + raise exception.ValidationError( + attribute='trust_id', target='trust' + ) trust = PROVIDERS.trust_api.get_trust(trust_id) return trust @@ -228,25 +239,28 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): return get_app_cred(app_cred_id) name = app_cred_info.get('name') if not name: - raise exception.ValidationError(attribute='name or ID', - target='application credential') + raise exception.ValidationError( + 
attribute='name or ID', target='application credential' + ) user = app_cred_info.get('user') if not user: - raise exception.ValidationError(attribute='user', - target='application credential') + raise exception.ValidationError( + attribute='user', target='application credential' + ) user_id = user.get('id') if not user_id: if 'domain' not in user: - raise exception.ValidationError(attribute='domain', - target='user') + raise exception.ValidationError( + attribute='domain', target='user' + ) domain_ref = self._lookup_domain(user['domain']) user_id = PROVIDERS.identity_api.get_user_by_name( - user['name'], domain_ref['id'])['id'] + user['name'], domain_ref['id'] + )['id'] hints = driver_hints.Hints() hints.add_filter('name', name) app_cred_api = PROVIDERS.application_credential_api - app_creds = app_cred_api.list_application_credentials( - user_id, hints) + app_creds = app_cred_api.list_application_credentials(user_id, hints) if len(app_creds) != 1: message = "Could not find application credential: %s" % name tr_message = _("Could not find application credential: %s") % name @@ -267,17 +281,26 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): if 'scope' in self.auth: detail = "Application credentials cannot request a scope." 
raise exception.ApplicationCredentialAuthError( - detail=detail) + detail=detail + ) self._set_scope_from_app_cred( - self.auth['identity']['application_credential']) + self.auth['identity']['application_credential'] + ) return if 'scope' not in self.auth: return - if sum(['project' in self.auth['scope'], - 'domain' in self.auth['scope'], - 'unscoped' in self.auth['scope'], - 'system' in self.auth['scope'], - 'OS-TRUST:trust' in self.auth['scope']]) != 1: + if ( + sum( + [ + 'project' in self.auth['scope'], + 'domain' in self.auth['scope'], + 'unscoped' in self.auth['scope'], + 'system' in self.auth['scope'], + 'OS-TRUST:trust' in self.auth['scope'], + ] + ) + != 1 + ): msg = 'system, project, domain, OS-TRUST:trust or unscoped' raise exception.ValidationError(attribute=msg, target='scope') if 'system' in self.auth['scope']: @@ -294,13 +317,19 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): self._scope_data = (domain_ref['id'], None, None, None, None) elif 'OS-TRUST:trust' in self.auth['scope']: trust_ref = self._lookup_trust( - self.auth['scope']['OS-TRUST:trust']) + self.auth['scope']['OS-TRUST:trust'] + ) # TODO(ayoung): when trusts support domains, fill in domain data if trust_ref.get('project_id') is not None: project_ref = self._lookup_project( - {'id': trust_ref['project_id']}) + {'id': trust_ref['project_id']} + ) self._scope_data = ( - None, project_ref['id'], trust_ref, None, None + None, + project_ref['id'], + trust_ref, + None, + None, ) else: @@ -310,8 +339,9 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): # make sure all the method data/payload are provided for method_name in self.get_method_names(): if method_name not in self.auth['identity']: - raise exception.ValidationError(attribute=method_name, - target='identity') + raise exception.ValidationError( + attribute=method_name, target='identity' + ) # make sure auth method is supported for method_name in self.get_method_names(): @@ -327,8 +357,9 @@ class 
AuthInfo(provider_api.ProviderAPIMixin, object): """ # make sure "auth" exist if not self.auth: - raise exception.ValidationError(attribute='auth', - target='request body') + raise exception.ValidationError( + attribute='auth', target='request body' + ) # NOTE(chioleong): Tokenless auth does not provide auth methods, # we only care about using this method to validate the scope @@ -360,8 +391,9 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): """ if method not in self.auth['identity']['methods']: - raise exception.ValidationError(attribute=method, - target='identity') + raise exception.ValidationError( + attribute=method, target='identity' + ) return self.auth['identity'][method] def get_scope(self): @@ -386,8 +418,14 @@ class AuthInfo(provider_api.ProviderAPIMixin, object): """ return self._scope_data - def set_scope(self, domain_id=None, project_id=None, trust=None, - unscoped=None, system=None): + def set_scope( + self, + domain_id=None, + project_id=None, + trust=None, + unscoped=None, + system=None, + ): """Set scope information.""" if domain_id and project_id: msg = _('Scoping to both domain and project is not allowed') @@ -430,16 +468,21 @@ class UserMFARulesValidator(provider_api.ProviderAPIMixin, object): user_ref = PROVIDERS.identity_api.get_user(user_id) mfa_rules = user_ref['options'].get(ro.MFA_RULES_OPT.option_name, []) mfa_rules_enabled = user_ref['options'].get( - ro.MFA_ENABLED_OPT.option_name, True) + ro.MFA_ENABLED_OPT.option_name, True + ) rules = cls._parse_rule_structure(mfa_rules, user_ref['id']) if not rules or not mfa_rules_enabled: # return quickly if the rules are disabled for the user or not set - LOG.debug('MFA Rules not processed for user `%(user_id)s`. ' - 'Rule list: `%(rules)s` (Enabled: `%(enabled)s`).', - {'user_id': user_id, - 'rules': mfa_rules, - 'enabled': mfa_rules_enabled}) + LOG.debug( + 'MFA Rules not processed for user `%(user_id)s`. 
' + 'Rule list: `%(rules)s` (Enabled: `%(enabled)s`).', + { + 'user_id': user_id, + 'rules': mfa_rules, + 'enabled': mfa_rules_enabled, + }, + ) return True for r in rules: @@ -451,20 +494,24 @@ class UserMFARulesValidator(provider_api.ProviderAPIMixin, object): r_set = set(r).intersection(cls._auth_methods()) if set(auth_methods).issuperset(r_set): # Rule Matches no need to continue, return here. - LOG.debug('Auth methods for user `%(user_id)s`, `%(methods)s` ' - 'matched MFA rule `%(rule)s`. Loaded ' - 'auth_methods: `%(loaded)s`', - {'user_id': user_id, - 'rule': list(r_set), - 'methods': auth_methods, - 'loaded': cls._auth_methods()}) + LOG.debug( + 'Auth methods for user `%(user_id)s`, `%(methods)s` ' + 'matched MFA rule `%(rule)s`. Loaded ' + 'auth_methods: `%(loaded)s`', + { + 'user_id': user_id, + 'rule': list(r_set), + 'methods': auth_methods, + 'loaded': cls._auth_methods(), + }, + ) return True - LOG.debug('Auth methods for user `%(user_id)s`, `%(methods)s` did not ' - 'match a MFA rule in `%(rules)s`.', - {'user_id': user_id, - 'methods': auth_methods, - 'rules': rules}) + LOG.debug( + 'Auth methods for user `%(user_id)s`, `%(methods)s` did not ' + 'match a MFA rule in `%(rules)s`.', + {'user_id': user_id, 'methods': auth_methods, 'rules': rules}, + ) return False @staticmethod @@ -488,9 +535,11 @@ class UserMFARulesValidator(provider_api.ProviderAPIMixin, object): # processing. rule_set = [] if not isinstance(rules, list): - LOG.error('Corrupt rule data structure for user %(user_id)s, ' - 'no rules loaded.', - {'user_id': user_id}) + LOG.error( + 'Corrupt rule data structure for user %(user_id)s, ' + 'no rules loaded.', + {'user_id': user_id}, + ) # Corrupt Data means no rules. Auth success > MFA rules in this # case. return rule_set @@ -502,9 +551,11 @@ class UserMFARulesValidator(provider_api.ProviderAPIMixin, object): if not isinstance(r_list, list): # Rule was not a list, it is invalid, drop the rule from # being considered. 
- LOG.info('Ignoring Rule %(type)r; rule must be a list of ' - 'strings.', - {'type': type(r_list)}) + LOG.info( + 'Ignoring Rule %(type)r; rule must be a list of ' + 'strings.', + {'type': type(r_list)}, + ) continue if r_list: @@ -514,9 +565,11 @@ class UserMFARulesValidator(provider_api.ProviderAPIMixin, object): if not isinstance(item, str): # Rules may only contain strings for method names # Reject a rule with non-string values - LOG.info('Ignoring Rule %(rule)r; rule contains ' - 'non-string values.', - {'rule': r_list}) + LOG.info( + 'Ignoring Rule %(rule)r; rule contains ' + 'non-string values.', + {'rule': r_list}, + ) # Rule is known to be bad, drop it from consideration. _ok_rule = False break diff --git a/keystone/auth/plugins/application_credential.py b/keystone/auth/plugins/application_credential.py index 54a7af2557..4682a39dde 100644 --- a/keystone/auth/plugins/application_credential.py +++ b/keystone/auth/plugins/application_credential.py @@ -26,16 +26,19 @@ class ApplicationCredential(base.AuthMethodHandler): def authenticate(self, auth_payload): """Authenticate an application.""" response_data = {} - app_cred_info = auth_plugins.AppCredInfo.create(auth_payload, - METHOD_NAME) + app_cred_info = auth_plugins.AppCredInfo.create( + auth_payload, METHOD_NAME + ) try: PROVIDERS.application_credential_api.authenticate( application_credential_id=app_cred_info.id, - secret=app_cred_info.secret) + secret=app_cred_info.secret, + ) except AssertionError as e: raise exception.Unauthorized(e) response_data['user_id'] = app_cred_info.user_id - return base.AuthHandlerResponse(status=True, response_body=None, - response_data=response_data) + return base.AuthHandlerResponse( + status=True, response_body=None, response_data=response_data + ) diff --git a/keystone/auth/plugins/base.py b/keystone/auth/plugins/base.py index b8ceb65ef7..7f6fa0347a 100644 --- a/keystone/auth/plugins/base.py +++ b/keystone/auth/plugins/base.py @@ -20,11 +20,13 @@ from keystone import 
exception AuthHandlerResponse = collections.namedtuple( - 'AuthHandlerResponse', 'status, response_body, response_data') + 'AuthHandlerResponse', 'status, response_body, response_data' +) -class AuthMethodHandler(provider_api.ProviderAPIMixin, object, - metaclass=abc.ABCMeta): +class AuthMethodHandler( + provider_api.ProviderAPIMixin, object, metaclass=abc.ABCMeta +): """Abstract base class for an authentication plugin.""" def __init__(self): diff --git a/keystone/auth/plugins/core.py b/keystone/auth/plugins/core.py index 1ae451df33..18e1b7e24d 100644 --- a/keystone/auth/plugins/core.py +++ b/keystone/auth/plugins/core.py @@ -28,8 +28,9 @@ CONF = keystone.conf.CONF LOG = log.getLogger(__name__) PROVIDERS = provider_api.ProviderAPIs _NOTIFY_OP = 'authenticate' -_NOTIFY_EVENT = '{service}.{event}'.format(service=notifications.SERVICE, - event=_NOTIFY_OP) +_NOTIFY_EVENT = '{service}.{event}'.format( + service=notifications.SERVICE, event=_NOTIFY_OP +) def construct_method_map_from_config(): @@ -115,8 +116,8 @@ class BaseUserInfo(provider_api.ProviderAPIMixin, object): def _assert_domain_is_enabled(self, domain_ref): try: PROVIDERS.resource_api.assert_domain_enabled( - domain_id=domain_ref['id'], - domain=domain_ref) + domain_id=domain_ref['id'], domain=domain_ref + ) except AssertionError as e: LOG.warning(e) raise exception.Unauthorized from e @@ -124,8 +125,8 @@ class BaseUserInfo(provider_api.ProviderAPIMixin, object): def _assert_user_is_enabled(self, user_ref): try: PROVIDERS.identity_api.assert_user_enabled( - user_id=user_ref['id'], - user=user_ref) + user_id=user_ref['id'], user=user_ref + ) except AssertionError as e: LOG.warning(e) raise exception.Unauthorized from e @@ -134,12 +135,14 @@ class BaseUserInfo(provider_api.ProviderAPIMixin, object): domain_id = domain_info.get('id') domain_name = domain_info.get('name') if not domain_id and not domain_name: - raise exception.ValidationError(attribute='id or name', - target='domain') + raise 
exception.ValidationError( + attribute='id or name', target='domain' + ) try: if domain_name: domain_ref = PROVIDERS.resource_api.get_domain_by_name( - domain_name) + domain_name + ) else: domain_ref = PROVIDERS.resource_api.get_domain(domain_id) except exception.DomainNotFound as e: @@ -150,27 +153,32 @@ class BaseUserInfo(provider_api.ProviderAPIMixin, object): def _validate_and_normalize_auth_data(self, auth_payload): if 'user' not in auth_payload: - raise exception.ValidationError(attribute='user', - target=self.METHOD_NAME) + raise exception.ValidationError( + attribute='user', target=self.METHOD_NAME + ) user_info = auth_payload['user'] user_id = user_info.get('id') user_name = user_info.get('name') domain_ref = {} if not user_id and not user_name: - raise exception.ValidationError(attribute='id or name', - target='user') + raise exception.ValidationError( + attribute='id or name', target='user' + ) try: if user_name: if 'domain' not in user_info: - raise exception.ValidationError(attribute='domain', - target='user') + raise exception.ValidationError( + attribute='domain', target='user' + ) domain_ref = self._lookup_domain(user_info['domain']) user_ref = PROVIDERS.identity_api.get_user_by_name( - user_name, domain_ref['id']) + user_name, domain_ref['id'] + ) else: user_ref = PROVIDERS.identity_api.get_user(user_id) domain_ref = PROVIDERS.resource_api.get_domain( - user_ref['domain_id']) + user_ref['domain_id'] + ) self._assert_domain_is_enabled(domain_ref) except exception.UserNotFound as e: LOG.warning(e) @@ -196,7 +204,8 @@ class BaseUserInfo(provider_api.ProviderAPIMixin, object): outcome=taxonomy.OUTCOME_FAILURE, target=resource.Resource(typeURI=taxonomy.ACCOUNT_USER), event_type=_NOTIFY_EVENT, - reason=audit_reason) + reason=audit_reason, + ) raise exception.Unauthorized(e) self._assert_user_is_enabled(user_ref) self.user_ref = user_ref @@ -212,7 +221,8 @@ class UserAuthInfo(BaseUserInfo): def _validate_and_normalize_auth_data(self, auth_payload): 
super(UserAuthInfo, self)._validate_and_normalize_auth_data( - auth_payload) + auth_payload + ) user_info = auth_payload['user'] self.password = user_info.get('password') @@ -225,7 +235,8 @@ class TOTPUserInfo(BaseUserInfo): def _validate_and_normalize_auth_data(self, auth_payload): super(TOTPUserInfo, self)._validate_and_normalize_auth_data( - auth_payload) + auth_payload + ) user_info = auth_payload['user'] self.passcode = user_info.get('passcode') @@ -240,23 +251,28 @@ class AppCredInfo(BaseUserInfo): app_cred_api = PROVIDERS.application_credential_api if auth_payload.get('id'): app_cred = app_cred_api.get_application_credential( - auth_payload['id']) + auth_payload['id'] + ) self.user_id = app_cred['user_id'] if not auth_payload.get('user'): auth_payload['user'] = {} auth_payload['user']['id'] = self.user_id super(AppCredInfo, self)._validate_and_normalize_auth_data( - auth_payload) + auth_payload + ) elif auth_payload.get('name'): super(AppCredInfo, self)._validate_and_normalize_auth_data( - auth_payload) + auth_payload + ) hints = driver_hints.Hints() hints.add_filter('name', auth_payload['name']) app_cred = app_cred_api.list_application_credentials( - self.user_id, hints)[0] + self.user_id, hints + )[0] auth_payload['id'] = app_cred['id'] else: - raise exception.ValidationError(attribute='id or name', - target='application credential') + raise exception.ValidationError( + attribute='id or name', target='application credential' + ) self.id = auth_payload['id'] self.secret = auth_payload.get('secret') diff --git a/keystone/auth/plugins/external.py b/keystone/auth/plugins/external.py index 4fe896714d..a6b4c13d02 100644 --- a/keystone/auth/plugins/external.py +++ b/keystone/auth/plugins/external.py @@ -48,8 +48,9 @@ class Base(base.AuthMethodHandler, metaclass=abc.ABCMeta): raise exception.Unauthorized(msg) response_data['user_id'] = user_ref['id'] - return base.AuthHandlerResponse(status=True, response_body=None, - response_data=response_data) + return 
base.AuthHandlerResponse( + status=True, response_body=None, response_data=response_data + ) @abc.abstractmethod def _authenticate(self): @@ -64,8 +65,8 @@ class DefaultDomain(Base): def _authenticate(self): """Use remote_user to look up the user in the identity backend.""" return PROVIDERS.identity_api.get_user_by_name( - flask.request.remote_user, - CONF.identity.default_domain_id) + flask.request.remote_user, CONF.identity.default_domain_id + ) class Domain(Base): @@ -83,7 +84,8 @@ class Domain(Base): domain_id = CONF.identity.default_domain_id return PROVIDERS.identity_api.get_user_by_name( - flask.request.remote_user, domain_id) + flask.request.remote_user, domain_id + ) class KerberosDomain(Domain): diff --git a/keystone/auth/plugins/mapped.py b/keystone/auth/plugins/mapped.py index 23ba337246..b9e4d95bd5 100644 --- a/keystone/auth/plugins/mapped.py +++ b/keystone/auth/plugins/mapped.py @@ -52,19 +52,22 @@ class Mapped(base.AuthMethodHandler): """ if 'id' in auth_payload: token_ref = self._get_token_ref(auth_payload) - response_data = handle_scoped_token(token_ref, - PROVIDERS.federation_api, - PROVIDERS.identity_api) + response_data = handle_scoped_token( + token_ref, PROVIDERS.federation_api, PROVIDERS.identity_api + ) else: - response_data = handle_unscoped_token(auth_payload, - PROVIDERS.resource_api, - PROVIDERS.federation_api, - PROVIDERS.identity_api, - PROVIDERS.assignment_api, - PROVIDERS.role_api) + response_data = handle_unscoped_token( + auth_payload, + PROVIDERS.resource_api, + PROVIDERS.federation_api, + PROVIDERS.identity_api, + PROVIDERS.assignment_api, + PROVIDERS.role_api, + ) - return base.AuthHandlerResponse(status=True, response_body=None, - response_data=response_data) + return base.AuthHandlerResponse( + status=True, response_body=None, response_data=response_data + ) def handle_scoped_token(token, federation_api, identity_api): @@ -78,15 +81,21 @@ def handle_scoped_token(token, federation_api, identity_api): for group_dict in 
token.federated_groups: group_ids.append(group_dict['id']) send_notification = functools.partial( - notifications.send_saml_audit_notification, 'authenticate', - user_id, group_ids, identity_provider, protocol, - token_audit_id) + notifications.send_saml_audit_notification, + 'authenticate', + user_id, + group_ids, + identity_provider, + protocol, + token_audit_id, + ) utils.assert_enabled_identity_provider(federation_api, identity_provider) try: mapping = federation_api.get_mapping_from_idp_and_protocol( - identity_provider, protocol) + identity_provider, protocol + ) utils.validate_mapped_group_ids(group_ids, mapping['id'], identity_api) except Exception: @@ -106,8 +115,7 @@ def handle_scoped_token(token, federation_api, identity_api): return response_data -def configure_project_domain(shadow_project, idp_domain_id, - resource_api): +def configure_project_domain(shadow_project, idp_domain_id, resource_api): """Configure federated projects domain. We set the domain to be the default (idp_domain_id) if the project @@ -119,16 +127,23 @@ def configure_project_domain(shadow_project, idp_domain_id, db_domain = resource_api.get_domain_by_name(domain['name']) domain = {"id": db_domain.get('id')} shadow_project['domain'] = domain - LOG.debug('Project [%s] domain ID was resolved to [%s]', - shadow_project['name'], shadow_project['domain']['id']) + LOG.debug( + 'Project [%s] domain ID was resolved to [%s]', + shadow_project['name'], + shadow_project['domain']['id'], + ) -def handle_projects_from_mapping(shadow_projects, idp_domain_id, - existing_roles, user, assignment_api, - resource_api): +def handle_projects_from_mapping( + shadow_projects, + idp_domain_id, + existing_roles, + user, + assignment_api, + resource_api, +): for shadow_project in shadow_projects: - configure_project_domain( - shadow_project, idp_domain_id, resource_api) + configure_project_domain(shadow_project, idp_domain_id, resource_api) try: # Check and see if the project already exists and if it # does 
not, try to create it. @@ -139,32 +154,40 @@ def handle_projects_from_mapping(shadow_projects, idp_domain_id, LOG.info( 'Project %(project_name)s does not exist. It will be ' 'automatically provisioning for user %(user_id)s.', - {'project_name': shadow_project['name'], - 'user_id': user['id']} + { + 'project_name': shadow_project['name'], + 'user_id': user['id'], + }, ) project_ref = { 'id': uuid.uuid4().hex, 'name': shadow_project['name'], - 'domain_id': shadow_project['domain']['id'] + 'domain_id': shadow_project['domain']['id'], } project = resource_api.create_project( - project_ref['id'], - project_ref + project_ref['id'], project_ref ) shadow_roles = shadow_project['roles'] for shadow_role in shadow_roles: assignment_api.create_grant( existing_roles[shadow_role['name']]['id'], user_id=user['id'], - project_id=project['id'] + project_id=project['id'], ) -def handle_unscoped_token(auth_payload, resource_api, federation_api, - identity_api, assignment_api, role_api): +def handle_unscoped_token( + auth_payload, + resource_api, + federation_api, + identity_api, + assignment_api, + role_api, +): - def validate_shadow_mapping(shadow_projects, existing_roles, - user_domain_id, idp_id): + def validate_shadow_mapping( + shadow_projects, existing_roles, user_domain_id, idp_id + ): # Validate that the roles in the shadow mapping actually exist. If # they don't we should bail early before creating anything. for shadow_project in shadow_projects: @@ -176,30 +199,35 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api, 'Role %s was specified in the mapping but does ' 'not exist. All roles specified in a mapping must ' 'exist before assignment.', - shadow_role['name'] + shadow_role['name'], ) # NOTE(lbragstad): The RoleNotFound exception usually # expects a role_id as the parameter, but in this case we # only have a name so we'll pass that instead. 
raise exception.RoleNotFound(shadow_role['name']) role = existing_roles[shadow_role['name']] - if (role['domain_id'] is not None and - role['domain_id'] != user_domain_id): + if ( + role['domain_id'] is not None + and role['domain_id'] != user_domain_id + ): LOG.error( 'Role %(role)s is a domain-specific role and ' 'cannot be assigned within %(domain)s.', - {'role': shadow_role['name'], 'domain': user_domain_id} + { + 'role': shadow_role['name'], + 'domain': user_domain_id, + }, ) raise exception.DomainSpecificRoleNotWithinIdPDomain( - role_name=shadow_role['name'], - identity_provider=idp_id + role_name=shadow_role['name'], identity_provider=idp_id ) def is_ephemeral_user(mapped_properties): return mapped_properties['user']['type'] == utils.UserType.EPHEMERAL - def build_ephemeral_user_context(user, mapped_properties, - identity_provider, protocol): + def build_ephemeral_user_context( + user, mapped_properties, identity_provider, protocol + ): resp = {} resp['user_id'] = user['id'] resp['group_ids'] = mapped_properties['group_ids'] @@ -210,8 +238,9 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api, def build_local_user_context(mapped_properties): resp = {} - user_info = auth_plugins.UserAuthInfo.create(mapped_properties, - METHOD_NAME) + user_info = auth_plugins.UserAuthInfo.create( + mapped_properties, METHOD_NAME + ) resp['user_id'] = user_info.user_id return resp @@ -221,12 +250,12 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api, identity_provider = auth_payload['identity_provider'] except KeyError: raise exception.ValidationError( - attribute='identity_provider', target='mapped') + attribute='identity_provider', target='mapped' + ) try: protocol = auth_payload['protocol'] except KeyError: - raise exception.ValidationError( - attribute='protocol', target='mapped') + raise exception.ValidationError(attribute='protocol', target='mapped') utils.assert_enabled_identity_provider(federation_api, identity_provider) @@ 
-242,24 +271,33 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api, try: try: mapped_properties, mapping_id = apply_mapping_filter( - identity_provider, protocol, assertion, resource_api, - federation_api, identity_api) + identity_provider, + protocol, + assertion, + resource_api, + federation_api, + identity_api, + ) except exception.ValidationError as e: # if mapping is either invalid or yield no valid identity, # it is considered a failed authentication raise exception.Unauthorized(e) if is_ephemeral_user(mapped_properties): - idp_domain_id = federation_api.get_idp( - identity_provider)['domain_id'] + idp_domain_id = federation_api.get_idp(identity_provider)[ + 'domain_id' + ] - validate_and_prepare_federated_user(mapped_properties, - idp_domain_id, resource_api) + validate_and_prepare_federated_user( + mapped_properties, idp_domain_id, resource_api + ) user = identity_api.shadow_federated_user( identity_provider, - protocol, mapped_properties['user'], - group_ids=mapped_properties['group_ids']) + protocol, + mapped_properties['user'], + group_ids=mapped_properties['group_ids'], + ) if 'projects' in mapped_properties: @@ -276,7 +314,7 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api, mapped_properties['projects'], existing_roles, mapped_properties['user']['domain']['id'], - identity_provider + identity_provider, ) handle_projects_from_mapping( mapped_properties['projects'], @@ -284,13 +322,14 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api, existing_roles, user, assignment_api, - resource_api + resource_api, ) user_id = user['id'] group_ids = mapped_properties['group_ids'] response_data = build_ephemeral_user_context( - user, mapped_properties, identity_provider, protocol) + user, mapped_properties, identity_provider, protocol + ) else: response_data = build_local_user_context(mapped_properties) @@ -299,19 +338,27 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api, # send 
off failed authentication notification, raise the exception # after sending the notification outcome = taxonomy.OUTCOME_FAILURE - notifications.send_saml_audit_notification('authenticate', - user_id, group_ids, - identity_provider, - protocol, token_id, - outcome) + notifications.send_saml_audit_notification( + 'authenticate', + user_id, + group_ids, + identity_provider, + protocol, + token_id, + outcome, + ) raise else: outcome = taxonomy.OUTCOME_SUCCESS - notifications.send_saml_audit_notification('authenticate', - user_id, group_ids, - identity_provider, - protocol, token_id, - outcome) + notifications.send_saml_audit_notification( + 'authenticate', + user_id, + group_ids, + identity_provider, + protocol, + token_id, + outcome, + ) return response_data @@ -321,13 +368,20 @@ def extract_assertion_data(): return assertion -def apply_mapping_filter(identity_provider, protocol, assertion, - resource_api, federation_api, identity_api): +def apply_mapping_filter( + identity_provider, + protocol, + assertion, + resource_api, + federation_api, + identity_api, +): idp = federation_api.get_idp(identity_provider) utils.validate_idp(idp, protocol, assertion) mapped_properties, mapping_id = federation_api.evaluate( - identity_provider, protocol, assertion) + identity_provider, protocol, assertion + ) # NOTE(marek-denis): We update group_ids only here to avoid fetching # groups identified by name/domain twice. 
@@ -339,14 +393,19 @@ def apply_mapping_filter(identity_provider, protocol, assertion, utils.validate_mapped_group_ids(group_ids, mapping_id, identity_api) group_ids.extend( utils.transform_to_group_ids( - mapped_properties['group_names'], mapping_id, - identity_api, resource_api)) + mapped_properties['group_names'], + mapping_id, + identity_api, + resource_api, + ) + ) mapped_properties['group_ids'] = list(set(group_ids)) return mapped_properties, mapping_id def validate_and_prepare_federated_user( - mapped_properties, idp_domain_id, resource_api): + mapped_properties, idp_domain_id, resource_api +): """Setup federated username. Function covers all the cases for properly setting user id, a primary @@ -386,9 +445,11 @@ def validate_and_prepare_federated_user( user_name = user.get('name') or flask.request.remote_user if not any([user_id, user_name]): - msg = _("Could not map user while setting ephemeral user identity. " - "Either mapping rules must specify user id/name or " - "REMOTE_USER environment variable must be set.") + msg = _( + "Could not map user while setting ephemeral user identity. " + "Either mapping rules must specify user id/name or " + "REMOTE_USER environment variable must be set." 
+ ) raise exception.Unauthorized(msg) elif not user_name: @@ -408,5 +469,8 @@ def validate_and_prepare_federated_user( domain = {"id": db_domain.get('id')} user['domain'] = domain - LOG.debug('User [%s] domain ID was resolved to [%s]', user['name'], - user['domain']['id']) + LOG.debug( + 'User [%s] domain ID was resolved to [%s]', + user['name'], + user['domain']['id'], + ) diff --git a/keystone/auth/plugins/oauth1.py b/keystone/auth/plugins/oauth1.py index a052315b93..c3c8a89816 100644 --- a/keystone/auth/plugins/oauth1.py +++ b/keystone/auth/plugins/oauth1.py @@ -36,7 +36,8 @@ class OAuth(base.AuthMethodHandler): if not access_token_id: raise exception.ValidationError( - attribute='oauth_token', target='request') + attribute='oauth_token', target='request' + ) acc_token = PROVIDERS.oauth_api.get_access_token(access_token_id) @@ -44,20 +45,22 @@ class OAuth(base.AuthMethodHandler): if expires_at: now = timeutils.utcnow() expires = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) + timeutils.parse_isotime(expires_at) + ) if now > expires: raise exception.Unauthorized(_('Access token is expired')) url = ks_flask.base_url(path=flask.request.path) access_verifier = oauth.ResourceEndpoint( request_validator=validator.OAuthValidator(), - token_generator=oauth.token_generator) + token_generator=oauth.token_generator, + ) result, request = access_verifier.validate_protected_resource_request( url, http_method='POST', body=flask.request.args, headers=dict(flask.request.headers), - realms=None + realms=None, ) if not result: msg = _('Could not validate the access token') @@ -66,5 +69,6 @@ class OAuth(base.AuthMethodHandler): response_data['access_token_id'] = access_token_id response_data['project_id'] = acc_token['project_id'] - return base.AuthHandlerResponse(status=True, response_body=None, - response_data=response_data) + return base.AuthHandlerResponse( + status=True, response_body=None, response_data=response_data + ) diff --git 
a/keystone/auth/plugins/password.py b/keystone/auth/plugins/password.py index 13f2949272..be2637310c 100644 --- a/keystone/auth/plugins/password.py +++ b/keystone/auth/plugins/password.py @@ -32,8 +32,8 @@ class Password(base.AuthMethodHandler): try: PROVIDERS.identity_api.authenticate( - user_id=user_info.user_id, - password=user_info.password) + user_id=user_info.user_id, password=user_info.password + ) except AssertionError: # authentication failed because of invalid username or password msg = _('Invalid username or password') @@ -41,5 +41,6 @@ class Password(base.AuthMethodHandler): response_data['user_id'] = user_info.user_id - return base.AuthHandlerResponse(status=True, response_body=None, - response_data=response_data) + return base.AuthHandlerResponse( + status=True, response_body=None, response_data=response_data + ) diff --git a/keystone/auth/plugins/token.py b/keystone/auth/plugins/token.py index 4441ba8242..347743e8c3 100644 --- a/keystone/auth/plugins/token.py +++ b/keystone/auth/plugins/token.py @@ -37,13 +37,11 @@ class Token(base.AuthMethodHandler): def authenticate(self, auth_payload): if 'id' not in auth_payload: - raise exception.ValidationError(attribute='id', - target='token') + raise exception.ValidationError(attribute='id', target='token') token = self._get_token_ref(auth_payload) if token.is_federated and PROVIDERS.federation_api: response_data = mapped.handle_scoped_token( - token, PROVIDERS.federation_api, - PROVIDERS.identity_api + token, PROVIDERS.federation_api, PROVIDERS.identity_api ) else: response_data = token_authenticate(token) @@ -54,8 +52,9 @@ class Token(base.AuthMethodHandler): # AuthMethodHandlers do no such thing and this is not required. 
response_data.setdefault('method_names', []).extend(token.methods) - return base.AuthHandlerResponse(status=True, response_body=None, - response_data=response_data) + return base.AuthHandlerResponse( + status=True, response_body=None, response_data=response_data + ) def token_authenticate(token): @@ -68,23 +67,23 @@ def token_authenticate(token): # privilege attacks json_body = flask.request.get_json(silent=True, force=True) or {} - project_scoped = 'project' in json_body['auth'].get( - 'scope', {} - ) - domain_scoped = 'domain' in json_body['auth'].get( - 'scope', {} - ) + project_scoped = 'project' in json_body['auth'].get('scope', {}) + domain_scoped = 'domain' in json_body['auth'].get('scope', {}) if token.oauth_scoped: raise exception.ForbiddenAction( action=_( 'Using OAuth-scoped token to create another token. ' - 'Create a new OAuth-scoped token instead')) + 'Create a new OAuth-scoped token instead' + ) + ) elif token.trust_scoped: raise exception.ForbiddenAction( action=_( 'Using trust-scoped token to create another token. ' - 'Create a new trust-scoped token instead')) + 'Create a new trust-scoped token instead' + ) + ) elif token.system_scoped and (project_scoped or domain_scoped): raise exception.ForbiddenAction( action=_( @@ -97,7 +96,8 @@ def token_authenticate(token): # Do not allow conversion from scoped tokens. 
if token.project_scoped or token.domain_scoped: raise exception.ForbiddenAction( - action=_('rescope a scoped token')) + action=_('rescope a scoped token') + ) # New tokens maintain the audit_id of the original token in the # chain (if possible) as the second element in the audit data diff --git a/keystone/auth/plugins/totp.py b/keystone/auth/plugins/totp.py index 133c8b4016..be85ab747d 100644 --- a/keystone/auth/plugins/totp.py +++ b/keystone/auth/plugins/totp.py @@ -72,8 +72,12 @@ def _generate_totp_passcodes(secret, included_previous_windows=0): # HMAC-SHA1 when generating the TOTP, which is currently not insecure but # will still trigger when scanned by bandit. totp = crypto_totp.TOTP( - decoded, PASSCODE_LENGTH, hashes.SHA1(), PASSCODE_TIME_PERIOD, # nosec - backend=default_backend()) + decoded, + PASSCODE_LENGTH, + hashes.SHA1(), # nosec + PASSCODE_TIME_PERIOD, + backend=default_backend(), + ) passcode_ts = timeutils.utcnow_ts(microsecond=True) passcodes = [totp.generate(passcode_ts).decode('utf-8')] @@ -95,22 +99,29 @@ class TOTP(base.AuthMethodHandler): auth_passcode = auth_payload.get('user').get('passcode') credentials = PROVIDERS.credential_api.list_credentials_for_user( - user_info.user_id, type='totp') + user_info.user_id, type='totp' + ) valid_passcode = False for credential in credentials: try: generated_passcodes = _generate_totp_passcodes( - credential['blob'], CONF.totp.included_previous_windows) + credential['blob'], CONF.totp.included_previous_windows + ) if auth_passcode in generated_passcodes: valid_passcode = True break except (ValueError, KeyError): - LOG.debug('No TOTP match; credential id: %s, user_id: %s', - credential['id'], user_info.user_id) - except (TypeError): - LOG.debug('Base32 decode failed for TOTP credential %s', - credential['id']) + LOG.debug( + 'No TOTP match; credential id: %s, user_id: %s', + credential['id'], + user_info.user_id, + ) + except TypeError: + LOG.debug( + 'Base32 decode failed for TOTP credential %s', + 
credential['id'], + ) if not valid_passcode: # authentication failed because of invalid username or passcode @@ -119,5 +130,6 @@ class TOTP(base.AuthMethodHandler): response_data['user_id'] = user_info.user_id - return base.AuthHandlerResponse(status=True, response_body=None, - response_data=response_data) + return base.AuthHandlerResponse( + status=True, response_body=None, response_data=response_data + ) diff --git a/keystone/auth/schema.py b/keystone/auth/schema.py index 963d4c840e..1b748647c4 100644 --- a/keystone/auth/schema.py +++ b/keystone/auth/schema.py @@ -24,7 +24,9 @@ token_issue = { 'properties': { 'methods': { 'type': 'array', - 'items': {'type': 'string', }, + 'items': { + 'type': 'string', + }, }, 'password': { 'type': 'object', @@ -32,14 +34,24 @@ token_issue = { 'user': { 'type': 'object', 'properties': { - 'id': {'type': 'string', }, - 'name': {'type': 'string', }, - 'password': {'type': 'string', }, + 'id': { + 'type': 'string', + }, + 'name': { + 'type': 'string', + }, + 'password': { + 'type': 'string', + }, 'domain': { 'type': 'object', 'properties': { - 'id': {'type': 'string', }, - 'name': {'type': 'string', }, + 'id': { + 'type': 'string', + }, + 'name': { + 'type': 'string', + }, }, }, }, @@ -53,10 +65,14 @@ token_issue = { 'type': 'string', }, }, - 'required': ['id', ], + 'required': [ + 'id', + ], }, }, - 'required': ['methods', ], + 'required': [ + 'methods', + ], }, 'scope': { # For explicit unscoped authentication the type should not be @@ -70,13 +86,21 @@ token_issue = { 'project': { 'type': 'object', 'properties': { - 'name': {'type': 'string', }, - 'id': {'type': 'string', }, + 'name': { + 'type': 'string', + }, + 'id': { + 'type': 'string', + }, 'domain': { 'type': 'object', 'properties': { - 'id': {'type': 'string', }, - 'name': {'type': 'string', }, + 'id': { + 'type': 'string', + }, + 'name': { + 'type': 'string', + }, }, }, }, @@ -84,26 +108,32 @@ token_issue = { 'domain': { 'type': 'object', 'properties': { - 'id': {'type': 
'string', }, - 'name': {'type': 'string', }, + 'id': { + 'type': 'string', + }, + 'name': { + 'type': 'string', + }, }, }, 'OS-TRUST:trust': { 'type': 'object', 'properties': { - 'id': {'type': 'string', }, - } + 'id': { + 'type': 'string', + }, + }, }, 'system': { 'type': 'object', - 'properties': { - 'all': parameter_types.boolean - } - } + 'properties': {'all': parameter_types.boolean}, + }, }, }, }, - 'required': ['identity', ], + 'required': [ + 'identity', + ], } @@ -115,8 +145,10 @@ def validate_issue_token_auth(auth=None): user = auth['identity'].get('password', {}).get('user') if user is not None: if 'id' not in user and 'name' not in user: - msg = _('Invalid input for field identity/password/user: ' - 'id or name must be present.') + msg = _( + 'Invalid input for field identity/password/user: ' + 'id or name must be present.' + ) raise exception.SchemaValidationError(detail=msg) domain = user.get('domain') @@ -124,7 +156,8 @@ def validate_issue_token_auth(auth=None): if 'id' not in domain and 'name' not in domain: msg = _( 'Invalid input for field identity/password/user/domain: ' - 'id or name must be present.') + 'id or name must be present.' + ) raise exception.SchemaValidationError(detail=msg) scope = auth.get('scope') @@ -134,19 +167,22 @@ def validate_issue_token_auth(auth=None): if 'id' not in project and 'name' not in project: msg = _( 'Invalid input for field scope/project: ' - 'id or name must be present.') + 'id or name must be present.' + ) raise exception.SchemaValidationError(detail=msg) domain = project.get('domain') if domain is not None: if 'id' not in domain and 'name' not in domain: msg = _( 'Invalid input for field scope/project/domain: ' - 'id or name must be present.') + 'id or name must be present.' 
+ ) raise exception.SchemaValidationError(detail=msg) domain = scope.get('domain') if domain is not None: if 'id' not in domain and 'name' not in domain: msg = _( 'Invalid input for field scope/domain: ' - 'id or name must be present.') + 'id or name must be present.' + ) raise exception.SchemaValidationError(detail=msg) diff --git a/keystone/catalog/backends/base.py b/keystone/catalog/backends/base.py index 5903f0a6e7..9e5eb2edde 100644 --- a/keystone/catalog/backends/base.py +++ b/keystone/catalog/backends/base.py @@ -22,8 +22,9 @@ from keystone import exception CONF = keystone.conf.CONF -class CatalogDriverBase(provider_api.ProviderAPIMixin, object, - metaclass=abc.ABCMeta): +class CatalogDriverBase( + provider_api.ProviderAPIMixin, object, metaclass=abc.ABCMeta +): """Interface description for the Catalog driver.""" def _get_list_limit(self): @@ -41,7 +42,8 @@ class CatalogDriverBase(provider_api.ProviderAPIMixin, object, # self circle if parent_region_id == root_region_id: raise exception.CircularRegionHierarchyError( - parent_region_id=parent_region_id) + parent_region_id=parent_region_id + ) parent_region = self.get_region(parent_region_id) parent_region_id = parent_region.get('parent_region_id') @@ -470,8 +472,9 @@ class CatalogDriverBase(provider_api.ProviderAPIMixin, object, raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): + def remove_endpoint_group_from_project( + self, endpoint_group_id, project_id + ): """Remove an endpoint to project association. 
:param endpoint_group_id: identity of endpoint to associate diff --git a/keystone/catalog/backends/sql.py b/keystone/catalog/backends/sql.py index 63c49b8f0f..f7721db201 100644 --- a/keystone/catalog/backends/sql.py +++ b/keystone/catalog/backends/sql.py @@ -52,30 +52,46 @@ class Service(sql.ModelBase, sql.ModelDictMixinWithExtras): attributes = ['id', 'type', 'enabled'] id = sql.Column(sql.String(64), primary_key=True) type = sql.Column(sql.String(255)) - enabled = sql.Column(sql.Boolean, nullable=False, default=True, - server_default=sqlalchemy.sql.expression.true()) + enabled = sql.Column( + sql.Boolean, + nullable=False, + default=True, + server_default=sqlalchemy.sql.expression.true(), + ) extra = sql.Column(sql.JsonBlob()) endpoints = sqlalchemy.orm.relationship("Endpoint", backref="service") class Endpoint(sql.ModelBase, sql.ModelDictMixinWithExtras): __tablename__ = 'endpoint' - attributes = ['id', 'interface', 'region_id', 'service_id', 'url', - 'legacy_endpoint_id', 'enabled'] + attributes = [ + 'id', + 'interface', + 'region_id', + 'service_id', + 'url', + 'legacy_endpoint_id', + 'enabled', + ] id = sql.Column(sql.String(64), primary_key=True) legacy_endpoint_id = sql.Column(sql.String(64)) interface = sql.Column(sql.String(8), nullable=False) - region_id = sql.Column(sql.String(255), - sql.ForeignKey('region.id', - ondelete='RESTRICT'), - nullable=True, - default=None) - service_id = sql.Column(sql.String(64), - sql.ForeignKey('service.id'), - nullable=False) + region_id = sql.Column( + sql.String(255), + sql.ForeignKey('region.id', ondelete='RESTRICT'), + nullable=True, + default=None, + ) + service_id = sql.Column( + sql.String(64), sql.ForeignKey('service.id'), nullable=False + ) url = sql.Column(sql.Text(), nullable=False) - enabled = sql.Column(sql.Boolean, nullable=False, default=True, - server_default=sqlalchemy.sql.expression.true()) + enabled = sql.Column( + sql.Boolean, + nullable=False, + default=True, + 
server_default=sqlalchemy.sql.expression.true(), + ) extra = sql.Column(sql.JsonBlob()) @classmethod @@ -277,17 +293,19 @@ class Catalog(base.CatalogDriverBase): substitutions.update({'user_id': user_id}) silent_keyerror_failures = [] if project_id: - substitutions.update({ - 'tenant_id': project_id, - 'project_id': project_id - }) + substitutions.update( + {'tenant_id': project_id, 'project_id': project_id} + ) else: silent_keyerror_failures = ['tenant_id', 'project_id'] with sql.session_for_read() as session: - endpoints = (session.query(Endpoint). - options(sql.joinedload(Endpoint.service)). - filter(Endpoint.enabled == true()).all()) + endpoints = ( + session.query(Endpoint) + .options(sql.joinedload(Endpoint.service)) + .filter(Endpoint.enabled == true()) + .all() + ) catalog = {} @@ -296,8 +314,10 @@ class Catalog(base.CatalogDriverBase): continue try: formatted_url = utils.format_url( - endpoint['url'], substitutions, - silent_keyerror_failures=silent_keyerror_failures) + endpoint['url'], + substitutions, + silent_keyerror_failures=silent_keyerror_failures, + ) if formatted_url is not None: url = formatted_url else: @@ -310,7 +330,7 @@ class Catalog(base.CatalogDriverBase): default_service = { 'id': endpoint['id'], 'name': endpoint.service.extra.get('name', ''), - 'publicURL': '' + 'publicURL': '', } catalog.setdefault(region, {}) catalog[region].setdefault(service_type, default_service) @@ -336,29 +356,37 @@ class Catalog(base.CatalogDriverBase): d.update({'user_id': user_id}) silent_keyerror_failures = [] if project_id: - d.update({ - 'tenant_id': project_id, - 'project_id': project_id, - }) + d.update( + { + 'tenant_id': project_id, + 'project_id': project_id, + } + ) else: silent_keyerror_failures = ['tenant_id', 'project_id'] with sql.session_for_read() as session: - services = (session.query(Service).filter( - Service.enabled == true()).options( - sql.joinedload(Service.endpoints)).all()) + services = ( + session.query(Service) + 
.filter(Service.enabled == true()) + .options(sql.joinedload(Service.endpoints)) + .all() + ) def make_v3_endpoints(endpoints): - for endpoint in (ep.to_dict() - for ep in endpoints if ep.enabled): + for endpoint in ( + ep.to_dict() for ep in endpoints if ep.enabled + ): del endpoint['service_id'] del endpoint['legacy_endpoint_id'] del endpoint['enabled'] endpoint['region'] = endpoint['region_id'] try: formatted_url = utils.format_url( - endpoint['url'], d, - silent_keyerror_failures=silent_keyerror_failures) + endpoint['url'], + d, + silent_keyerror_failures=silent_keyerror_failures, + ) if formatted_url: endpoint['url'] = formatted_url else: @@ -388,7 +416,8 @@ class Catalog(base.CatalogDriverBase): filtered_endpoints = {} if project_id: filtered_endpoints = ( - self.catalog_api.list_endpoints_for_project(project_id)) + self.catalog_api.list_endpoints_for_project(project_id) + ) # endpoint filter is enabled, only return the filtered endpoints. if filtered_endpoints: filtered_ids = list(filtered_endpoints.keys()) @@ -428,18 +457,21 @@ class Catalog(base.CatalogDriverBase): @sql.handle_conflicts(conflict_type='project_endpoint') def add_endpoint_to_project(self, endpoint_id, project_id): with sql.session_for_write() as session: - endpoint_filter_ref = ProjectEndpoint(endpoint_id=endpoint_id, - project_id=project_id) + endpoint_filter_ref = ProjectEndpoint( + endpoint_id=endpoint_id, project_id=project_id + ) session.add(endpoint_filter_ref) def _get_project_endpoint_ref(self, session, endpoint_id, project_id): endpoint_filter_ref = session.get( - ProjectEndpoint, (endpoint_id, project_id), + ProjectEndpoint, + (endpoint_id, project_id), ) if endpoint_filter_ref is None: - msg = _('Endpoint %(endpoint_id)s not found in project ' - '%(project_id)s') % {'endpoint_id': endpoint_id, - 'project_id': project_id} + msg = _( + 'Endpoint %(endpoint_id)s not found in project ' + '%(project_id)s' + ) % {'endpoint_id': endpoint_id, 'project_id': project_id} raise 
exception.NotFound(msg) return endpoint_filter_ref @@ -450,7 +482,8 @@ class Catalog(base.CatalogDriverBase): def remove_endpoint_from_project(self, endpoint_id, project_id): with sql.session_for_write() as session: endpoint_filter_ref = self._get_project_endpoint_ref( - session, endpoint_id, project_id) + session, endpoint_id, project_id + ) session.delete(endpoint_filter_ref) def list_endpoints_for_project(self, project_id): @@ -489,40 +522,46 @@ class Catalog(base.CatalogDriverBase): endpoint_group_ref = session.get(EndpointGroup, endpoint_group_id) if endpoint_group_ref is None: raise exception.EndpointGroupNotFound( - endpoint_group_id=endpoint_group_id) + endpoint_group_id=endpoint_group_id + ) return endpoint_group_ref def get_endpoint_group(self, endpoint_group_id): with sql.session_for_read() as session: - endpoint_group_ref = self._get_endpoint_group(session, - endpoint_group_id) + endpoint_group_ref = self._get_endpoint_group( + session, endpoint_group_id + ) return endpoint_group_ref.to_dict() def update_endpoint_group(self, endpoint_group_id, endpoint_group): with sql.session_for_write() as session: - endpoint_group_ref = self._get_endpoint_group(session, - endpoint_group_id) + endpoint_group_ref = self._get_endpoint_group( + session, endpoint_group_id + ) old_endpoint_group = endpoint_group_ref.to_dict() old_endpoint_group.update(endpoint_group) new_endpoint_group = EndpointGroup.from_dict(old_endpoint_group) for attr in EndpointGroup.mutable_attributes: - setattr(endpoint_group_ref, attr, - getattr(new_endpoint_group, attr)) + setattr( + endpoint_group_ref, attr, getattr(new_endpoint_group, attr) + ) return endpoint_group_ref.to_dict() def delete_endpoint_group(self, endpoint_group_id): with sql.session_for_write() as session: - endpoint_group_ref = self._get_endpoint_group(session, - endpoint_group_id) + endpoint_group_ref = self._get_endpoint_group( + session, endpoint_group_id + ) self._delete_endpoint_group_association_by_endpoint_group( - 
session, endpoint_group_id) + session, endpoint_group_id + ) session.delete(endpoint_group_ref) def get_endpoint_group_in_project(self, endpoint_group_id, project_id): with sql.session_for_read() as session: - ref = self._get_endpoint_group_in_project(session, - endpoint_group_id, - project_id) + ref = self._get_endpoint_group_in_project( + session, endpoint_group_id, project_id + ) return ref.to_dict() @sql.handle_conflicts(conflict_type='project_endpoint_group') @@ -530,13 +569,16 @@ class Catalog(base.CatalogDriverBase): with sql.session_for_write() as session: # Create a new Project Endpoint group entity endpoint_group_project_ref = ProjectEndpointGroupMembership( - endpoint_group_id=endpoint_group_id, project_id=project_id) + endpoint_group_id=endpoint_group_id, project_id=project_id + ) session.add(endpoint_group_project_ref) - def _get_endpoint_group_in_project(self, session, - endpoint_group_id, project_id): + def _get_endpoint_group_in_project( + self, session, endpoint_group_id, project_id + ): endpoint_group_project_ref = session.get( - ProjectEndpointGroupMembership, (endpoint_group_id, project_id), + ProjectEndpointGroupMembership, + (endpoint_group_id, project_id), ) if endpoint_group_project_ref is None: msg = _('Endpoint Group Project Association not found') @@ -548,7 +590,8 @@ class Catalog(base.CatalogDriverBase): with sql.session_for_read() as session: query = session.query(EndpointGroup) endpoint_group_refs = sql.filter_limit_query( - EndpointGroup, query, hints) + EndpointGroup, query, hints + ) return [e.to_dict() for e in endpoint_group_refs] def list_endpoint_groups_for_project(self, project_id): @@ -558,11 +601,13 @@ class Catalog(base.CatalogDriverBase): endpoint_group_refs = query.all() return [ref.to_dict() for ref in endpoint_group_refs] - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): + def remove_endpoint_group_from_project( + self, endpoint_group_id, project_id + ): with sql.session_for_write() as 
session: endpoint_group_project_ref = self._get_endpoint_group_in_project( - session, endpoint_group_id, project_id) + session, endpoint_group_id, project_id + ) session.delete(endpoint_group_project_ref) def list_projects_associated_with_endpoint_group(self, endpoint_group_id): @@ -573,7 +618,8 @@ class Catalog(base.CatalogDriverBase): return [ref.to_dict() for ref in endpoint_group_refs] def _delete_endpoint_group_association_by_endpoint_group( - self, session, endpoint_group_id): + self, session, endpoint_group_id + ): query = session.query(ProjectEndpointGroupMembership) query = query.filter_by(endpoint_group_id=endpoint_group_id) query.delete() @@ -590,12 +636,8 @@ class ProjectEndpoint(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'project_endpoint' attributes = ['endpoint_id', 'project_id'] - endpoint_id = sql.Column(sql.String(64), - primary_key=True, - nullable=False) - project_id = sql.Column(sql.String(64), - primary_key=True, - nullable=False) + endpoint_id = sql.Column(sql.String(64), primary_key=True, nullable=False) + project_id = sql.Column(sql.String(64), primary_key=True, nullable=False) class EndpointGroup(sql.ModelBase, sql.ModelDictMixin): @@ -615,9 +657,10 @@ class ProjectEndpointGroupMembership(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'project_endpoint_group' attributes = ['endpoint_group_id', 'project_id'] - endpoint_group_id = sql.Column(sql.String(64), - sql.ForeignKey('endpoint_group.id'), - nullable=False) + endpoint_group_id = sql.Column( + sql.String(64), sql.ForeignKey('endpoint_group.id'), nullable=False + ) project_id = sql.Column(sql.String(64), nullable=False) - __table_args__ = (sql.PrimaryKeyConstraint('endpoint_group_id', - 'project_id'),) + __table_args__ = ( + sql.PrimaryKeyConstraint('endpoint_group_id', 'project_id'), + ) diff --git a/keystone/catalog/backends/templated.py b/keystone/catalog/backends/templated.py index 3370ef8e1b..ab615b4cca 100644 --- a/keystone/catalog/backends/templated.py +++ 
b/keystone/catalog/backends/templated.py @@ -83,8 +83,10 @@ class Catalog(base.CatalogDriverBase): def __init__(self, templates=None): super(Catalog, self).__init__() - LOG.warning('The templated catalog driver has been deprecated and ' - 'will be removed in a future release.') + LOG.warning( + 'The templated catalog driver has been deprecated and ' + 'will be removed in a future release.' + ) if templates: self.templates = templates else: @@ -107,8 +109,10 @@ class Catalog(base.CatalogDriverBase): raise exception.NotImplemented() def list_regions(self, hints): - return [{'id': region_id, 'description': '', 'parent_region_id': ''} - for region_id in self.templates] + return [ + {'id': region_id, 'description': '', 'parent_region_id': ''} + for region_id in self.templates + ] def get_region(self, region_id): if region_id in self.templates: @@ -163,8 +167,11 @@ class Catalog(base.CatalogDriverBase): for key in service_ref: if key.endswith('URL'): interface = key[:-3] - endpoint_id = ('%s-%s-%s' % - (region_id, service_type, interface)) + endpoint_id = '%s-%s-%s' % ( + region_id, + service_type, + interface, + ) yield { 'id': endpoint_id, 'service_id': service_type, @@ -208,12 +215,17 @@ class Catalog(base.CatalogDriverBase): substitutions.update({'user_id': user_id}) silent_keyerror_failures = [] if project_id: - substitutions.update({ - 'tenant_id': project_id, - 'project_id': project_id, - }) + substitutions.update( + { + 'tenant_id': project_id, + 'project_id': project_id, + } + ) else: - silent_keyerror_failures = ['tenant_id', 'project_id', ] + silent_keyerror_failures = [ + 'tenant_id', + 'project_id', + ] catalog = {} # TODO(davechen): If there is service with no endpoints, we should @@ -226,8 +238,10 @@ class Catalog(base.CatalogDriverBase): try: for k, v in service_ref.items(): formatted_value = utils.format_url( - v, substitutions, - silent_keyerror_failures=silent_keyerror_failures) + v, + substitutions, + silent_keyerror_failures=silent_keyerror_failures, 
+ ) if formatted_value: service_data[k] = formatted_value except exception.MalformedEndpoint: # nosec(tkelsey) @@ -259,7 +273,7 @@ class Catalog(base.CatalogDriverBase): if service_type not in v3_catalog: v3_catalog[service_type] = { 'type': service_type, - 'endpoints': [] + 'endpoints': [], } for attr, value in service.items(): @@ -270,12 +284,14 @@ class Catalog(base.CatalogDriverBase): # { 'interface': 'public', 'url': '', 'region': # 'region: '' } if attr.endswith('URL'): - v3_interface = attr[:-len('URL')] - v3_catalog[service_type]['endpoints'].append({ - 'interface': v3_interface, - 'region': region_name, - 'url': value, - }) + v3_interface = attr[: -len('URL')] + v3_catalog[service_type]['endpoints'].append( + { + 'interface': v3_interface, + 'region': region_name, + 'url': value, + } + ) continue # Other attributes are copied to the service. @@ -331,8 +347,9 @@ class Catalog(base.CatalogDriverBase): def list_projects_associated_with_endpoint_group(self, endpoint_group_id): raise exception.NotImplemented() - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): + def remove_endpoint_group_from_project( + self, endpoint_group_id, project_id + ): raise exception.NotImplemented() def delete_endpoint_group_association_by_project(self, project_id): diff --git a/keystone/catalog/core.py b/keystone/catalog/core.py index a4e1e24f15..39ed06cbe4 100644 --- a/keystone/catalog/core.py +++ b/keystone/catalog/core.py @@ -38,8 +38,8 @@ MEMOIZE = cache.get_memoization_decorator(group='catalog') # entire cache region. 
COMPUTED_CATALOG_REGION = cache.create_region(name='computed catalog region') MEMOIZE_COMPUTED_CATALOG = cache.get_memoization_decorator( - group='catalog', - region=COMPUTED_CATALOG_REGION) + group='catalog', region=COMPUTED_CATALOG_REGION +) class Manager(manager.Manager): @@ -60,23 +60,31 @@ class Manager(manager.Manager): def __init__(self): super(Manager, self).__init__(CONF.catalog.driver) notifications.register_event_callback( - notifications.ACTIONS.deleted, 'project', - self._on_project_or_endpoint_delete) + notifications.ACTIONS.deleted, + 'project', + self._on_project_or_endpoint_delete, + ) notifications.register_event_callback( - notifications.ACTIONS.deleted, 'endpoint', - self._on_project_or_endpoint_delete) + notifications.ACTIONS.deleted, + 'endpoint', + self._on_project_or_endpoint_delete, + ) - def _on_project_or_endpoint_delete(self, service, resource_type, operation, - payload): + def _on_project_or_endpoint_delete( + self, service, resource_type, operation, payload + ): project_or_endpoint_id = payload['resource_info'] if resource_type == 'project': PROVIDERS.catalog_api.delete_association_by_project( - project_or_endpoint_id) + project_or_endpoint_id + ) PROVIDERS.catalog_api.delete_endpoint_group_association_by_project( - project_or_endpoint_id) + project_or_endpoint_id + ) else: PROVIDERS.catalog_api.delete_association_by_endpoint( - project_or_endpoint_id) + project_or_endpoint_id + ) def create_region(self, region_ref, initiator=None): # Check duplicate ID @@ -183,16 +191,18 @@ class Manager(manager.Manager): if region_id is not None: self.get_region(region_id) except exception.RegionNotFound: - raise exception.ValidationError(attribute='endpoint region_id', - target='region table') + raise exception.ValidationError( + attribute='endpoint region_id', target='region table' + ) def _assert_service_exists(self, service_id): try: if service_id is not None: self.get_service(service_id) except exception.ServiceNotFound: - raise 
exception.ValidationError(attribute='endpoint service_id', - target='service table') + raise exception.ValidationError( + attribute='endpoint service_id', target='service table' + ) def create_endpoint(self, endpoint_id, endpoint_ref, initiator=None): self._assert_region_exists(endpoint_ref.get('region_id')) @@ -247,19 +257,23 @@ class Manager(manager.Manager): def add_endpoint_group_to_project(self, endpoint_group_id, project_id): self.driver.add_endpoint_group_to_project( - endpoint_group_id, project_id) + endpoint_group_id, project_id + ) COMPUTED_CATALOG_REGION.invalidate() - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): + def remove_endpoint_group_from_project( + self, endpoint_group_id, project_id + ): self.driver.remove_endpoint_group_from_project( - endpoint_group_id, project_id) + endpoint_group_id, project_id + ) COMPUTED_CATALOG_REGION.invalidate() def delete_endpoint_group_association_by_project(self, project_id): try: self.driver.delete_endpoint_group_association_by_project( - project_id) + project_id + ) except exception.NotImplemented: # Some catalog drivers don't support this pass @@ -270,8 +284,10 @@ class Manager(manager.Manager): PROVIDERS.resource_api.get_project(project_id) try: refs = self.list_endpoint_groups_for_project(project_id) - endpoint_groups = [self.get_endpoint_group( - ref['endpoint_group_id']) for ref in refs] + endpoint_groups = [ + self.get_endpoint_group(ref['endpoint_group_id']) + for ref in refs + ] return endpoint_groups except exception.EndpointGroupNotFound: return [] @@ -307,15 +323,17 @@ class Manager(manager.Manager): filtered_endpoints.update({ref['endpoint_id']: endpoint}) except exception.EndpointNotFound: # remove bad reference from association - self.remove_endpoint_from_project(ref['endpoint_id'], - project_id) + self.remove_endpoint_from_project( + ref['endpoint_id'], project_id + ) # need to recover endpoint_groups associated with project # then for each endpoint group return the 
endpoints. endpoint_groups = self.get_endpoint_groups_for_project(project_id) for endpoint_group in endpoint_groups: endpoint_refs = self.get_endpoints_filtered_by_endpoint_group( - endpoint_group['id']) + endpoint_group['id'] + ) # now check if any endpoints for current endpoint group are not # contained in the list of filtered endpoints for endpoint_ref in endpoint_refs: diff --git a/keystone/catalog/schema.py b/keystone/catalog/schema.py index 8f435678ee..c94b6fe9be 100644 --- a/keystone/catalog/schema.py +++ b/keystone/catalog/schema.py @@ -13,28 +13,20 @@ from keystone.common import validation from keystone.common.validation import parameter_types -_service_properties_type = { - 'type': 'string', - 'minLength': 1, - 'maxLength': 255 -} +_service_properties_type = {'type': 'string', 'minLength': 1, 'maxLength': 255} _region_properties = { 'description': validation.nullable(parameter_types.description), # NOTE(lbragstad): Regions use ID differently. The user can specify the ID # or it will be generated automatically. - 'id': { - 'type': 'string' - }, - 'parent_region_id': { - 'type': ['string', 'null'] - } + 'id': {'type': 'string'}, + 'parent_region_id': {'type': ['string', 'null']}, } region_create = { 'type': 'object', 'properties': _region_properties, - 'additionalProperties': True + 'additionalProperties': True, # NOTE(lbragstad): No parameters are required for creating regions. 
} @@ -42,7 +34,7 @@ region_update = { 'type': 'object', 'properties': _region_properties, 'minProperties': 1, - 'additionalProperties': True + 'additionalProperties': True, } # Schema for Service v3 @@ -50,71 +42,60 @@ region_update = { _service_properties = { 'enabled': parameter_types.boolean, 'name': parameter_types.name, - 'type': _service_properties_type + 'type': _service_properties_type, } service_create = { 'type': 'object', 'properties': _service_properties, 'required': ['type'], - 'additionalProperties': True + 'additionalProperties': True, } service_update = { 'type': 'object', 'properties': _service_properties, 'minProperties': 1, - 'additionalProperties': True + 'additionalProperties': True, } _endpoint_properties = { 'enabled': parameter_types.boolean, - 'interface': { - 'type': 'string', - 'enum': ['admin', 'internal', 'public'] - }, - 'region_id': { - 'type': 'string' - }, - 'region': { - 'type': 'string' - }, - 'service_id': { - 'type': 'string' - }, - 'url': parameter_types.url + 'interface': {'type': 'string', 'enum': ['admin', 'internal', 'public']}, + 'region_id': {'type': 'string'}, + 'region': {'type': 'string'}, + 'service_id': {'type': 'string'}, + 'url': parameter_types.url, } endpoint_create = { 'type': 'object', 'properties': _endpoint_properties, 'required': ['interface', 'service_id', 'url'], - 'additionalProperties': True + 'additionalProperties': True, } endpoint_update = { 'type': 'object', 'properties': _endpoint_properties, 'minProperties': 1, - 'additionalProperties': True + 'additionalProperties': True, } _endpoint_group_properties = { 'description': validation.nullable(parameter_types.description), - 'filters': { - 'type': 'object' - }, - 'name': parameter_types.name + 'filters': {'type': 'object'}, + 'name': parameter_types.name, } endpoint_group_create = { 'type': 'object', 'properties': _endpoint_group_properties, - 'required': ['name', 'filters'] + 'required': ['name', 'filters'], } endpoint_group_update = { 'type': 
'object', 'properties': _endpoint_group_properties, - 'minProperties': 1 + 'minProperties': 1, } diff --git a/keystone/cmd/bootstrap.py b/keystone/cmd/bootstrap.py index 700864b189..227101ecbf 100644 --- a/keystone/cmd/bootstrap.py +++ b/keystone/cmd/bootstrap.py @@ -84,17 +84,19 @@ class Bootstrapper(object): 'id': CONF.identity.default_domain_id, 'name': 'Default', 'enabled': True, - 'description': 'The default domain' + 'description': 'The default domain', } try: PROVIDERS.resource_api.create_domain( - domain_id=default_domain['id'], - domain=default_domain) + domain_id=default_domain['id'], domain=default_domain + ) LOG.info('Created domain %s', default_domain['id']) except exception.Conflict: # NOTE(morganfainberg): Domain already exists, continue on. - LOG.info('Domain %s already exists, skipping creation.', - default_domain['id']) + LOG.info( + 'Domain %s already exists, skipping creation.', + default_domain['id'], + ) self.default_domain_id = default_domain['id'] @@ -106,13 +108,15 @@ class Bootstrapper(object): 'id': project_id, 'domain_id': self.default_domain_id, 'description': 'Bootstrap project for initializing the cloud.', - 'name': self.project_name + 'name': self.project_name, } PROVIDERS.resource_api.create_project(project_id, project) LOG.info('Created project %s', self.project_name) except exception.Conflict: - LOG.info('Project %s already exists, skipping creation.', - self.project_name) + LOG.info( + 'Project %s already exists, skipping creation.', + self.project_name, + ) project = PROVIDERS.resource_api.get_project_by_name( self.project_name, self.default_domain_id ) @@ -129,11 +133,14 @@ class Bootstrapper(object): role = PROVIDERS.role_api.create_role(role_id, role) LOG.info('Created role %s', role_name) if not self.immutable_roles: - LOG.warning("Role %(role)s was created as a mutable role. 
It " - "is recommended to make this role immutable by " - "adding the 'immutable' resource option to this " - "role, or re-running this command without " - "--no-immutable-role.", {'role': role_name}) + LOG.warning( + "Role %(role)s was created as a mutable role. It " + "is recommended to make this role immutable by " + "adding the 'immutable' resource option to this " + "role, or re-running this command without " + "--no-immutable-role.", + {'role': role_name}, + ) return role except exception.Conflict: LOG.info('Role %s exists, skipping creation.', role_name) @@ -154,18 +161,19 @@ class Bootstrapper(object): def _ensure_implied_role(self, prior_role_id, implied_role_id): try: - PROVIDERS.role_api.create_implied_role(prior_role_id, - implied_role_id) + PROVIDERS.role_api.create_implied_role( + prior_role_id, implied_role_id + ) LOG.info( 'Created implied role where %s implies %s', prior_role_id, - implied_role_id + implied_role_id, ) except exception.Conflict: LOG.info( 'Implied role where %s implies %s exists, skipping creation.', prior_role_id, - implied_role_id + implied_role_id, ) def _bootstrap_service_role(self): @@ -194,8 +202,9 @@ class Bootstrapper(object): # "manager" role, so we need to clean up the old admin -> member # implied role try: - PROVIDERS.role_api.delete_implied_role(self.admin_role_id, - self.member_role_id) + PROVIDERS.role_api.delete_implied_role( + self.admin_role_id, self.member_role_id + ) except exception.ImpliedRoleNotFound: pass @@ -205,8 +214,10 @@ class Bootstrapper(object): user = PROVIDERS.identity_api.get_user_by_name( self.admin_username, self.default_domain_id ) - LOG.info('User %s already exists, skipping creation.', - self.admin_username) + LOG.info( + 'User %s already exists, skipping creation.', + self.admin_username, + ) # If the user is not enabled, re-enable them. This also helps # provide some useful logging output later. @@ -232,9 +243,7 @@ class Bootstrapper(object): # or the user was previously disabled. 
This allows bootstrap to act # as a recovery tool, without having to create a new user. if update: - user = PROVIDERS.identity_api.update_user( - user['id'], update - ) + user = PROVIDERS.identity_api.update_user(user['id'], update) LOG.info('Reset password for user %s.', self.admin_username) if not enabled and user['enabled']: # Although we always try to enable the user, this log @@ -247,7 +256,7 @@ class Bootstrapper(object): 'name': self.admin_username, 'enabled': True, 'domain_id': self.default_domain_id, - 'password': self.admin_password + 'password': self.admin_password, } ) LOG.info('Created user %s', self.admin_username) @@ -259,19 +268,27 @@ class Bootstrapper(object): PROVIDERS.assignment_api.add_role_to_user_and_project( user_id=self.admin_user_id, project_id=self.project_id, - role_id=self.admin_role_id + role_id=self.admin_role_id, + ) + LOG.info( + 'Granted role %(role)s on project %(project)s to ' + 'user %(username)s.', + { + 'role': self.admin_role_name, + 'project': self.project_name, + 'username': self.admin_username, + }, ) - LOG.info('Granted role %(role)s on project %(project)s to ' - 'user %(username)s.', - {'role': self.admin_role_name, - 'project': self.project_name, - 'username': self.admin_username}) except exception.Conflict: - LOG.info('User %(username)s already has role %(role)s on ' - 'project %(project)s.', - {'username': self.admin_username, - 'role': self.admin_role_name, - 'project': self.project_name}) + LOG.info( + 'User %(username)s already has role %(role)s on ' + 'project %(project)s.', + { + 'username': self.admin_username, + 'role': self.admin_role_name, + 'project': self.project_name, + }, + ) def _bootstrap_system_role_assignment(self): # NOTE(lbragstad): We need to make sure a user has at least one role on @@ -284,15 +301,22 @@ class Bootstrapper(object): PROVIDERS.assignment_api.create_system_grant_for_user( self.admin_user_id, self.admin_role_id ) - LOG.info('Granted role %(role)s on the system to user' - ' 
%(username)s.', - {'role': self.admin_role_name, - 'username': self.admin_username}) + LOG.info( + 'Granted role %(role)s on the system to user' ' %(username)s.', + { + 'role': self.admin_role_name, + 'username': self.admin_username, + }, + ) except exception.Conflict: - LOG.info('User %(username)s already has role %(role)s on ' - 'the system.', - {'username': self.admin_username, - 'role': self.admin_role_name}) + LOG.info( + 'User %(username)s already has role %(role)s on ' + 'the system.', + { + 'username': self.admin_username, + 'role': self.admin_role_name, + }, + ) def _bootstrap_region(self): if self.region_id: @@ -302,8 +326,9 @@ class Bootstrapper(object): ) LOG.info('Created region %s', self.region_id) except exception.Conflict: - LOG.info('Region %s exists, skipping creation.', - self.region_id) + LOG.info( + 'Region %s exists, skipping creation.', self.region_id + ) def _bootstrap_catalog(self): if self.public_url or self.admin_url or self.internal_url: @@ -323,8 +348,10 @@ class Bootstrapper(object): else: service_id = uuid.uuid4().hex service = { - 'id': service_id, 'name': self.service_name, - 'type': 'identity', 'enabled': True + 'id': service_id, + 'name': self.service_name, + 'type': 'identity', + 'enabled': True, } PROVIDERS.catalog_api.create_service(service_id, service) @@ -332,9 +359,11 @@ class Bootstrapper(object): self.service_id = service['id'] available_interfaces = {e['interface']: e for e in endpoints} - expected_endpoints = {'public': self.public_url, - 'internal': self.internal_url, - 'admin': self.admin_url} + expected_endpoints = { + 'public': self.public_url, + 'internal': self.internal_url, + 'admin': self.admin_url, + } for interface, url in expected_endpoints.items(): if not url: @@ -344,26 +373,32 @@ class Bootstrapper(object): try: endpoint_ref = available_interfaces[interface] except KeyError: - endpoint_ref = {'id': uuid.uuid4().hex, - 'interface': interface, - 'url': url, - 'service_id': self.service_id, - 'enabled': True} 
+ endpoint_ref = { + 'id': uuid.uuid4().hex, + 'interface': interface, + 'url': url, + 'service_id': self.service_id, + 'enabled': True, + } if self.region_id: endpoint_ref['region_id'] = self.region_id PROVIDERS.catalog_api.create_endpoint( endpoint_id=endpoint_ref['id'], - endpoint_ref=endpoint_ref) + endpoint_ref=endpoint_ref, + ) - LOG.info('Created %(interface)s endpoint %(url)s', - {'interface': interface, 'url': url}) + LOG.info( + 'Created %(interface)s endpoint %(url)s', + {'interface': interface, 'url': url}, + ) else: endpoint_ref['url'] = url PROVIDERS.catalog_api.update_endpoint( endpoint_id=endpoint_ref['id'], - endpoint_ref=endpoint_ref) + endpoint_ref=endpoint_ref, + ) LOG.info('%s endpoint updated', interface) self.endpoints[interface] = endpoint_ref['id'] diff --git a/keystone/cmd/cli.py b/keystone/cmd/cli.py index 8a862f77bc..cc68a23cef 100644 --- a/keystone/cmd/cli.py +++ b/keystone/cmd/cli.py @@ -72,62 +72,109 @@ class BootStrap(BaseApp): @classmethod def add_argument_parser(cls, subparsers): parser = super(BootStrap, cls).add_argument_parser(subparsers) - parser.add_argument('--bootstrap-username', default='admin', - metavar='OS_BOOTSTRAP_USERNAME', - help=('The username of the initial keystone ' - 'user during bootstrap process.')) + parser.add_argument( + '--bootstrap-username', + default='admin', + metavar='OS_BOOTSTRAP_USERNAME', + help=( + 'The username of the initial keystone ' + 'user during bootstrap process.' + ), + ) # NOTE(morganfainberg): See below for ENV Variable that can be used # in lieu of the command-line arguments. 
- parser.add_argument('--bootstrap-password', default=None, - metavar='OS_BOOTSTRAP_PASSWORD', - help='The bootstrap user password') - parser.add_argument('--bootstrap-project-name', default='admin', - metavar='OS_BOOTSTRAP_PROJECT_NAME', - help=('The initial project created during the ' - 'keystone bootstrap process.')) - parser.add_argument('--bootstrap-role-name', default='admin', - metavar='OS_BOOTSTRAP_ROLE_NAME', - help=('The initial role-name created during the ' - 'keystone bootstrap process.')) - parser.add_argument('--bootstrap-service-name', default='keystone', - metavar='OS_BOOTSTRAP_SERVICE_NAME', - help=('The initial name for the initial identity ' - 'service created during the keystone ' - 'bootstrap process.')) - parser.add_argument('--bootstrap-admin-url', - metavar='OS_BOOTSTRAP_ADMIN_URL', - help=('The initial identity admin url created ' - 'during the keystone bootstrap process. ' - 'e.g. http://127.0.0.1:5000/v3')) - parser.add_argument('--bootstrap-public-url', - metavar='OS_BOOTSTRAP_PUBLIC_URL', - help=('The initial identity public url created ' - 'during the keystone bootstrap process. ' - 'e.g. http://127.0.0.1:5000/v3')) - parser.add_argument('--bootstrap-internal-url', - metavar='OS_BOOTSTRAP_INTERNAL_URL', - help=('The initial identity internal url created ' - 'during the keystone bootstrap process. ' - 'e.g. http://127.0.0.1:5000/v3')) - parser.add_argument('--bootstrap-region-id', - metavar='OS_BOOTSTRAP_REGION_ID', - help=('The initial region_id endpoints will be ' - 'placed in during the keystone bootstrap ' - 'process.')) - parser.add_argument('--immutable-roles', - default=True, - action='store_true', - help=('Whether default roles (admin, member, and ' - 'reader) should be immutable. This is the ' - 'default.')) - parser.add_argument('--no-immutable-roles', - default=False, - action='store_true', - help=('Whether default roles (admin, member, and ' - 'reader) should be immutable. 
Immutable ' - 'default roles is the default, use this ' - 'flag to opt out of immutable default ' - 'roles.')) + parser.add_argument( + '--bootstrap-password', + default=None, + metavar='OS_BOOTSTRAP_PASSWORD', + help='The bootstrap user password', + ) + parser.add_argument( + '--bootstrap-project-name', + default='admin', + metavar='OS_BOOTSTRAP_PROJECT_NAME', + help=( + 'The initial project created during the ' + 'keystone bootstrap process.' + ), + ) + parser.add_argument( + '--bootstrap-role-name', + default='admin', + metavar='OS_BOOTSTRAP_ROLE_NAME', + help=( + 'The initial role-name created during the ' + 'keystone bootstrap process.' + ), + ) + parser.add_argument( + '--bootstrap-service-name', + default='keystone', + metavar='OS_BOOTSTRAP_SERVICE_NAME', + help=( + 'The initial name for the initial identity ' + 'service created during the keystone ' + 'bootstrap process.' + ), + ) + parser.add_argument( + '--bootstrap-admin-url', + metavar='OS_BOOTSTRAP_ADMIN_URL', + help=( + 'The initial identity admin url created ' + 'during the keystone bootstrap process. ' + 'e.g. http://127.0.0.1:5000/v3' + ), + ) + parser.add_argument( + '--bootstrap-public-url', + metavar='OS_BOOTSTRAP_PUBLIC_URL', + help=( + 'The initial identity public url created ' + 'during the keystone bootstrap process. ' + 'e.g. http://127.0.0.1:5000/v3' + ), + ) + parser.add_argument( + '--bootstrap-internal-url', + metavar='OS_BOOTSTRAP_INTERNAL_URL', + help=( + 'The initial identity internal url created ' + 'during the keystone bootstrap process. ' + 'e.g. http://127.0.0.1:5000/v3' + ), + ) + parser.add_argument( + '--bootstrap-region-id', + metavar='OS_BOOTSTRAP_REGION_ID', + help=( + 'The initial region_id endpoints will be ' + 'placed in during the keystone bootstrap ' + 'process.' + ), + ) + parser.add_argument( + '--immutable-roles', + default=True, + action='store_true', + help=( + 'Whether default roles (admin, member, and ' + 'reader) should be immutable. This is the ' + 'default.' 
+ ), + ) + parser.add_argument( + '--no-immutable-roles', + default=False, + action='store_true', + help=( + 'Whether default roles (admin, member, and ' + 'reader) should be immutable. Immutable ' + 'default roles is the default, use this ' + 'flag to opt out of immutable default ' + 'roles.' + ), + ) return parser def do_bootstrap(self): @@ -139,38 +186,51 @@ class BootStrap(BaseApp): deployment. """ self.username = ( - os.environ.get('OS_BOOTSTRAP_USERNAME') or - CONF.command.bootstrap_username) + os.environ.get('OS_BOOTSTRAP_USERNAME') + or CONF.command.bootstrap_username + ) self.project_name = ( - os.environ.get('OS_BOOTSTRAP_PROJECT_NAME') or - CONF.command.bootstrap_project_name) + os.environ.get('OS_BOOTSTRAP_PROJECT_NAME') + or CONF.command.bootstrap_project_name + ) self.role_name = ( - os.environ.get('OS_BOOTSTRAP_ROLE_NAME') or - CONF.command.bootstrap_role_name) + os.environ.get('OS_BOOTSTRAP_ROLE_NAME') + or CONF.command.bootstrap_role_name + ) self.password = ( - os.environ.get('OS_BOOTSTRAP_PASSWORD') or - CONF.command.bootstrap_password) + os.environ.get('OS_BOOTSTRAP_PASSWORD') + or CONF.command.bootstrap_password + ) self.service_name = ( - os.environ.get('OS_BOOTSTRAP_SERVICE_NAME') or - CONF.command.bootstrap_service_name) + os.environ.get('OS_BOOTSTRAP_SERVICE_NAME') + or CONF.command.bootstrap_service_name + ) self.admin_url = ( - os.environ.get('OS_BOOTSTRAP_ADMIN_URL') or - CONF.command.bootstrap_admin_url) + os.environ.get('OS_BOOTSTRAP_ADMIN_URL') + or CONF.command.bootstrap_admin_url + ) self.public_url = ( - os.environ.get('OS_BOOTSTRAP_PUBLIC_URL') or - CONF.command.bootstrap_public_url) + os.environ.get('OS_BOOTSTRAP_PUBLIC_URL') + or CONF.command.bootstrap_public_url + ) self.internal_url = ( - os.environ.get('OS_BOOTSTRAP_INTERNAL_URL') or - CONF.command.bootstrap_internal_url) + os.environ.get('OS_BOOTSTRAP_INTERNAL_URL') + or CONF.command.bootstrap_internal_url + ) self.region_id = ( - os.environ.get('OS_BOOTSTRAP_REGION_ID') or 
- CONF.command.bootstrap_region_id) + os.environ.get('OS_BOOTSTRAP_REGION_ID') + or CONF.command.bootstrap_region_id + ) self.service_id = None self.endpoints = None if self.password is None: - print(_('ERROR: Either --bootstrap-password argument or ' - 'OS_BOOTSTRAP_PASSWORD must be set.')) + print( + _( + 'ERROR: Either --bootstrap-password argument or ' + 'OS_BOOTSTRAP_PASSWORD must be set.' + ) + ) sys.exit(1) self.bootstrapper.admin_password = self.password @@ -213,12 +273,17 @@ class ProjectSetup(BaseApp): @classmethod def add_argument_parser(cls, subparsers): parser = super(ProjectSetup, cls).add_argument_parser(subparsers) - parser.add_argument('--project-name', default=None, required=True, - help='The name of the keystone project being' - ' created.') - parser.add_argument('--project-id', default=None, - help='The UUID of the keystone project being' - ' created.') + parser.add_argument( + '--project-name', + default=None, + required=True, + help='The name of the keystone project being' ' created.', + ) + parser.add_argument( + '--project-id', + default=None, + help='The UUID of the keystone project being' ' created.', + ) return parser def do_project_setup(self): @@ -244,16 +309,24 @@ class UserSetup(BaseApp): @classmethod def add_argument_parser(cls, subparsers): parser = super(UserSetup, cls).add_argument_parser(subparsers) - parser.add_argument('--username', default=None, required=True, - help='The username of the keystone user that' - ' is being created.') - parser.add_argument('--user-password-plain', default=None, - required=True, - help='The plaintext password for the keystone' - ' user that is being created.') - parser.add_argument('--user-id', default=None, - help='The UUID of the keystone user being ' - 'created.') + parser.add_argument( + '--username', + default=None, + required=True, + help='The username of the keystone user that' ' is being created.', + ) + parser.add_argument( + '--user-password-plain', + default=None, + required=True, + 
help='The plaintext password for the keystone' + ' user that is being created.', + ) + parser.add_argument( + '--user-id', + default=None, + help='The UUID of the keystone user being ' 'created.', + ) return parser def do_user_setup(self): @@ -369,22 +442,28 @@ class DbSync(BaseApp): heads = upgrades.get_current_heads() if ( - upgrades.EXPAND_BRANCH not in heads or - heads[upgrades.EXPAND_BRANCH] != expand_version + upgrades.EXPAND_BRANCH not in heads + or heads[upgrades.EXPAND_BRANCH] != expand_version ): - LOG.info('Your database is not up to date. Your first step is ' - 'to run `keystone-manage db_sync --expand`.') + LOG.info( + 'Your database is not up to date. Your first step is ' + 'to run `keystone-manage db_sync --expand`.' + ) status = 2 elif ( - upgrades.CONTRACT_BRANCH not in heads or - heads[upgrades.CONTRACT_BRANCH] != contract_version + upgrades.CONTRACT_BRANCH not in heads + or heads[upgrades.CONTRACT_BRANCH] != contract_version ): - LOG.info('Expand version is ahead of contract. Your next ' - 'step is to run `keystone-manage db_sync --contract`.') + LOG.info( + 'Expand version is ahead of contract. Your next ' + 'step is to run `keystone-manage db_sync --contract`.' + ) status = 4 else: - LOG.info('All db_sync commands are upgraded to the same ' - 'version and up-to-date.') + LOG.info( + 'All db_sync commands are upgraded to the same ' + 'version and up-to-date.' 
+ ) LOG.info( 'Current repository versions:\n' @@ -410,8 +489,7 @@ class DbSync(BaseApp): elif CONF.command.contract: upgrades.contract_schema() else: - upgrades.offline_sync_database_to_version( - CONF.command.version) + upgrades.offline_sync_database_to_version(CONF.command.version) class DbVersion(BaseApp): @@ -429,9 +507,10 @@ class BasePermissionsSetup(BaseApp): @classmethod def add_argument_parser(cls, subparsers): - parser = super(BasePermissionsSetup, - cls).add_argument_parser(subparsers) - running_as_root = (os.geteuid() == 0) + parser = super(BasePermissionsSetup, cls).add_argument_parser( + subparsers + ) + running_as_root = os.geteuid() == 0 parser.add_argument('--keystone-user', required=running_as_root) parser.add_argument('--keystone-group', required=running_as_root) return parser @@ -459,27 +538,26 @@ class BasePermissionsSetup(BaseApp): @classmethod def initialize_fernet_repository( - cls, keystone_user_id, keystone_group_id, config_group=None): + cls, keystone_user_id, keystone_group_id, config_group=None + ): conf_group = getattr(CONF, config_group) futils = fernet_utils.FernetUtils( - conf_group.key_repository, - conf_group.max_active_keys, - config_group + conf_group.key_repository, conf_group.max_active_keys, config_group ) futils.create_key_directory(keystone_user_id, keystone_group_id) if futils.validate_key_repository(requires_write=True): futils.initialize_key_repository( - keystone_user_id, keystone_group_id) + keystone_user_id, keystone_group_id + ) @classmethod def rotate_fernet_repository( - cls, keystone_user_id, keystone_group_id, config_group=None): + cls, keystone_user_id, keystone_group_id, config_group=None + ): conf_group = getattr(CONF, config_group) futils = fernet_utils.FernetUtils( - conf_group.key_repository, - conf_group.max_active_keys, - config_group + conf_group.key_repository, conf_group.max_active_keys, config_group ) if futils.validate_key_repository(requires_write=True): futils.rotate_keys(keystone_user_id, 
keystone_group_id) @@ -500,14 +578,19 @@ class FernetSetup(BasePermissionsSetup): def main(cls): keystone_user_id, keystone_group_id = cls.get_user_group() cls.initialize_fernet_repository( - keystone_user_id, keystone_group_id, 'fernet_tokens') + keystone_user_id, keystone_group_id, 'fernet_tokens' + ) - if (os.path.abspath(CONF.fernet_tokens.key_repository) != - os.path.abspath(CONF.fernet_receipts.key_repository)): + if os.path.abspath( + CONF.fernet_tokens.key_repository + ) != os.path.abspath(CONF.fernet_receipts.key_repository): cls.initialize_fernet_repository( - keystone_user_id, keystone_group_id, 'fernet_receipts') - elif (CONF.fernet_tokens.max_active_keys != - CONF.fernet_receipts.max_active_keys): + keystone_user_id, keystone_group_id, 'fernet_receipts' + ) + elif ( + CONF.fernet_tokens.max_active_keys + != CONF.fernet_receipts.max_active_keys + ): # WARNING(adriant): If the directories are the same, # 'max_active_keys' is ignored from fernet_receipts in favor of # fernet_tokens to avoid a potential mismatch. 
Only if the @@ -545,11 +628,14 @@ class FernetRotate(BasePermissionsSetup): def main(cls): keystone_user_id, keystone_group_id = cls.get_user_group() cls.rotate_fernet_repository( - keystone_user_id, keystone_group_id, 'fernet_tokens') - if (os.path.abspath(CONF.fernet_tokens.key_repository) != - os.path.abspath(CONF.fernet_receipts.key_repository)): + keystone_user_id, keystone_group_id, 'fernet_tokens' + ) + if os.path.abspath( + CONF.fernet_tokens.key_repository + ) != os.path.abspath(CONF.fernet_receipts.key_repository): cls.rotate_fernet_repository( - keystone_user_id, keystone_group_id, 'fernet_receipts') + keystone_user_id, keystone_group_id, 'fernet_receipts' + ) class CreateJWSKeyPair(BasePermissionsSetup): @@ -568,8 +654,9 @@ class CreateJWSKeyPair(BasePermissionsSetup): parser = super(CreateJWSKeyPair, cls).add_argument_parser(subparsers) parser.add_argument( - '--force', action='store_true', - help=('Forcibly overwrite keys if they already exist') + '--force', + action='store_true', + help=('Forcibly overwrite keys if they already exist'), ) return parser @@ -580,11 +667,15 @@ class CreateJWSKeyPair(BasePermissionsSetup): public_key_path = os.path.join(current_directory, 'public.pem') if os.path.isfile(private_key_path) and not CONF.command.force: - raise SystemExit(_('Private key %(path)s already exists') - % {'path': private_key_path}) + raise SystemExit( + _('Private key %(path)s already exists') + % {'path': private_key_path} + ) if os.path.isfile(public_key_path) and not CONF.command.force: - raise SystemExit(_('Public key %(path)s already exists') - % {'path': public_key_path}) + raise SystemExit( + _('Public key %(path)s already exists') + % {'path': public_key_path} + ) jwt_utils.create_jws_keypair(private_key_path, public_key_path) @@ -604,7 +695,8 @@ class TokenSetup(BasePermissionsSetup): def main(cls): keystone_user_id, keystone_group_id = cls.get_user_group() cls.initialize_fernet_repository( - keystone_user_id, keystone_group_id, 
'fernet_tokens') + keystone_user_id, keystone_group_id, 'fernet_tokens' + ) class TokenRotate(BasePermissionsSetup): @@ -631,7 +723,8 @@ class TokenRotate(BasePermissionsSetup): def main(cls): keystone_user_id, keystone_group_id = cls.get_user_group() cls.rotate_fernet_repository( - keystone_user_id, keystone_group_id, 'fernet_tokens') + keystone_user_id, keystone_group_id, 'fernet_tokens' + ) class ReceiptSetup(BasePermissionsSetup): @@ -649,7 +742,8 @@ class ReceiptSetup(BasePermissionsSetup): def main(cls): keystone_user_id, keystone_group_id = cls.get_user_group() cls.initialize_fernet_repository( - keystone_user_id, keystone_group_id, 'fernet_receipts') + keystone_user_id, keystone_group_id, 'fernet_receipts' + ) class ReceiptRotate(BasePermissionsSetup): @@ -676,7 +770,8 @@ class ReceiptRotate(BasePermissionsSetup): def main(cls): keystone_user_id, keystone_group_id = cls.get_user_group() cls.rotate_fernet_repository( - keystone_user_id, keystone_group_id, 'fernet_receipts') + keystone_user_id, keystone_group_id, 'fernet_receipts' + ) class CredentialSetup(BasePermissionsSetup): @@ -695,15 +790,14 @@ class CredentialSetup(BasePermissionsSetup): futils = fernet_utils.FernetUtils( CONF.credential.key_repository, credential_fernet.MAX_ACTIVE_KEYS, - 'credential' + 'credential', ) keystone_user_id, keystone_group_id = cls.get_user_group() futils.create_key_directory(keystone_user_id, keystone_group_id) if futils.validate_key_repository(requires_write=True): futils.initialize_key_repository( - keystone_user_id, - keystone_group_id + keystone_user_id, keystone_group_id ) @@ -755,11 +849,13 @@ class CredentialRotate(BasePermissionsSetup): ) for credential in credentials: if credential['key_hash'] != primary_key_hash: - msg = _('Unable to rotate credential keys because not all ' - 'credentials are encrypted with the primary key. 
' - 'Please make sure all credentials have been encrypted ' - 'with the primary key using `keystone-manage ' - 'credential_migrate`.') + msg = _( + 'Unable to rotate credential keys because not all ' + 'credentials are encrypted with the primary key. ' + 'Please make sure all credentials have been encrypted ' + 'with the primary key using `keystone-manage ' + 'credential_migrate`.' + ) raise SystemExit(msg) @classmethod @@ -767,7 +863,7 @@ class CredentialRotate(BasePermissionsSetup): futils = fernet_utils.FernetUtils( CONF.credential.key_repository, credential_fernet.MAX_ACTIVE_KEYS, - 'credential' + 'credential', ) keystone_user_id, keystone_group_id = cls.get_user_group() @@ -816,10 +912,7 @@ class CredentialMigrate(BasePermissionsSetup): credential['encrypted_blob'] ) cred = {'blob': decrypted_blob} - self.credential_api.update_credential( - credential['id'], - cred - ) + self.credential_api.update_credential(credential['id'], cred) @classmethod def main(cls): @@ -827,7 +920,7 @@ class CredentialMigrate(BasePermissionsSetup): futils = fernet_utils.FernetUtils( CONF.credential.key_repository, credential_fernet.MAX_ACTIVE_KEYS, - 'credential' + 'credential', ) futils.validate_key_repository(requires_write=True) klass = cls() @@ -843,25 +936,45 @@ class TrustFlush(BaseApp): def add_argument_parser(cls, subparsers): parser = super(TrustFlush, cls).add_argument_parser(subparsers) - parser.add_argument('--project-id', default=None, - help=('The id of the project of which the ' - 'expired or non-expired soft-deleted ' - 'trusts is to be purged')) - parser.add_argument('--trustor-user-id', default=None, - help=('The id of the trustor of which the ' - 'expired or non-expired soft-deleted ' - 'trusts is to be purged')) - parser.add_argument('--trustee-user-id', default=None, - help=('The id of the trustee of which the ' - 'expired or non-expired soft-deleted ' - 'trusts is to be purged')) - parser.add_argument('--date', default=datetime.datetime.utcnow(), - help=('The 
date of which the expired or ' - 'non-expired soft-deleted trusts older ' - 'than that will be purged. The format of ' - 'the date to be "DD-MM-YYYY". If no date ' - 'is supplied keystone-manage will use the ' - 'system clock time at runtime')) + parser.add_argument( + '--project-id', + default=None, + help=( + 'The id of the project of which the ' + 'expired or non-expired soft-deleted ' + 'trusts is to be purged' + ), + ) + parser.add_argument( + '--trustor-user-id', + default=None, + help=( + 'The id of the trustor of which the ' + 'expired or non-expired soft-deleted ' + 'trusts is to be purged' + ), + ) + parser.add_argument( + '--trustee-user-id', + default=None, + help=( + 'The id of the trustee of which the ' + 'expired or non-expired soft-deleted ' + 'trusts is to be purged' + ), + ) + parser.add_argument( + '--date', + default=datetime.datetime.utcnow(), + help=( + 'The date of which the expired or ' + 'non-expired soft-deleted trusts older ' + 'than that will be purged. The format of ' + 'the date to be "DD-MM-YYYY". 
If no date ' + 'is supplied keystone-manage will use the ' + 'system clock time at runtime' + ), + ) return parser @classmethod @@ -872,19 +985,24 @@ class TrustFlush(BaseApp): if not isinstance(CONF.command.date, datetime.datetime): try: CONF.command.date = datetime.datetime.strptime( - CONF.command.date, '%d-%m-%Y') + CONF.command.date, '%d-%m-%Y' + ) except KeyError: - raise ValueError("'%s'Invalid input for date, should be " - "DD-MM-YYYY", CONF.command.date) + raise ValueError( + "'%s'Invalid input for date, should be " "DD-MM-YYYY", + CONF.command.date, + ) else: - LOG.info("No date is supplied, keystone-manage will use the " - "system clock time at runtime ") + LOG.info( + "No date is supplied, keystone-manage will use the " + "system clock time at runtime " + ) trust_manager.flush_expired_and_soft_deleted_trusts( project_id=CONF.command.project_id, trustor_user_id=CONF.command.trustor_user_id, trustee_user_id=CONF.command.trustee_user_id, - date=CONF.command.date + date=CONF.command.date, ) @@ -896,20 +1014,33 @@ class MappingPurge(BaseApp): @classmethod def add_argument_parser(cls, subparsers): parser = super(MappingPurge, cls).add_argument_parser(subparsers) - parser.add_argument('--all', default=False, action='store_true', - help=('Purge all mappings.')) - parser.add_argument('--domain-name', default=None, - help=('Purge any mappings for the domain ' - 'specified.')) - parser.add_argument('--public-id', default=None, - help=('Purge the mapping for the Public ID ' - 'specified.')) - parser.add_argument('--local-id', default=None, - help=('Purge the mappings for the Local ID ' - 'specified.')) - parser.add_argument('--type', default=None, choices=['user', 'group'], - help=('Purge any mappings for the type ' - 'specified.')) + parser.add_argument( + '--all', + default=False, + action='store_true', + help=('Purge all mappings.'), + ) + parser.add_argument( + '--domain-name', + default=None, + help=('Purge any mappings for the domain ' 'specified.'), + ) + 
parser.add_argument( + '--public-id', + default=None, + help=('Purge the mapping for the Public ID ' 'specified.'), + ) + parser.add_argument( + '--local-id', + default=None, + help=('Purge the mappings for the Local ID ' 'specified.'), + ) + parser.add_argument( + '--type', + default=None, + choices=['user', 'group'], + help=('Purge any mappings for the type ' 'specified.'), + ) return parser @staticmethod @@ -922,27 +1053,36 @@ class MappingPurge(BaseApp): # dangerous as a default. So we use it in a slightly # unconventional way, where all parameters are optional, but you # must specify at least one. - if (CONF.command.all is False and - CONF.command.domain_name is None and - CONF.command.public_id is None and - CONF.command.local_id is None and - CONF.command.type is None): + if ( + CONF.command.all is False + and CONF.command.domain_name is None + and CONF.command.public_id is None + and CONF.command.local_id is None + and CONF.command.type is None + ): raise ValueError(_('At least one option must be provided')) - if (CONF.command.all is True and - (CONF.command.domain_name is not None or - CONF.command.public_id is not None or - CONF.command.local_id is not None or - CONF.command.type is not None)): - raise ValueError(_('--all option cannot be mixed with ' - 'other options')) + if CONF.command.all is True and ( + CONF.command.domain_name is not None + or CONF.command.public_id is not None + or CONF.command.local_id is not None + or CONF.command.type is not None + ): + raise ValueError( + _('--all option cannot be mixed with ' 'other options') + ) def get_domain_id(name): try: return resource_manager.get_domain_by_name(name)['id'] except KeyError: - raise ValueError(_("Unknown domain '%(name)s' specified by " - "--domain-name") % {'name': name}) + raise ValueError( + _( + "Unknown domain '%(name)s' specified by " + "--domain-name" + ) + % {'name': name} + ) validate_options() drivers = backends.load_backends() @@ -983,16 +1123,20 @@ def 
_domain_config_finder(conf_dir): LOG.info('Scanning %r for domain config files', conf_dir) for r, d, f in os.walk(conf_dir): for fname in f: - if (fname.startswith(DOMAIN_CONF_FHEAD) and - fname.endswith(DOMAIN_CONF_FTAIL)): + if fname.startswith(DOMAIN_CONF_FHEAD) and fname.endswith( + DOMAIN_CONF_FTAIL + ): if fname.count('.') >= 2: - domain_name = fname[len(DOMAIN_CONF_FHEAD): - -len(DOMAIN_CONF_FTAIL)] + domain_name = fname[ + len(DOMAIN_CONF_FHEAD) : -len(DOMAIN_CONF_FTAIL) + ] yield (os.path.join(r, fname), domain_name) continue - LOG.warning('Ignoring file (%s) while scanning ' - 'domain config directory', fname) + LOG.warning( + 'Ignoring file (%s) while scanning ' 'domain config directory', + fname, + ) class DomainConfigUploadFiles(object): @@ -1018,16 +1162,22 @@ class DomainConfigUploadFiles(object): parameters are optional, but you must specify at least one. """ - if (CONF.command.all is False and - CONF.command.domain_name is None): - print(_('At least one option must be provided, use either ' - '--all or --domain-name')) + if CONF.command.all is False and CONF.command.domain_name is None: + print( + _( + 'At least one option must be provided, use either ' + '--all or --domain-name' + ) + ) return False - if (CONF.command.all is True and - CONF.command.domain_name is not None): - print(_('The --all option cannot be used with ' - 'the --domain-name option')) + if CONF.command.all is True and CONF.command.domain_name is not None: + print( + _( + 'The --all option cannot be used with ' + 'the --domain-name option' + ) + ) return False return True @@ -1041,21 +1191,27 @@ class DomainConfigUploadFiles(object): """ try: - domain_ref = ( - self.resource_manager.get_domain_by_name(domain_name)) + domain_ref = self.resource_manager.get_domain_by_name(domain_name) except exception.DomainNotFound: - print(_('Invalid domain name: %(domain)s found in config file ' - 'name: %(file)s - ignoring this file.') % { - 'domain': domain_name, - 'file': file_name}) + print( 
+ _( + 'Invalid domain name: %(domain)s found in config file ' + 'name: %(file)s - ignoring this file.' + ) + % {'domain': domain_name, 'file': file_name} + ) return False if self.domain_config_manager.get_config_with_sensitive_info( - domain_ref['id']): - print(_('Domain: %(domain)s already has a configuration ' - 'defined - ignoring file: %(file)s.') % { - 'domain': domain_name, - 'file': file_name}) + domain_ref['id'] + ): + print( + _( + 'Domain: %(domain)s already has a configuration ' + 'defined - ignoring file: %(file)s.' + ) + % {'domain': domain_name, 'file': file_name} + ) return False sections = {} @@ -1066,27 +1222,37 @@ class DomainConfigUploadFiles(object): # We explicitly don't try and differentiate the error cases, in # order to keep the code in this tool more robust as oslo.config # changes. - print(_('Error parsing configuration file for domain: %(domain)s, ' - 'file: %(file)s.') % { - 'domain': domain_name, - 'file': file_name}) + print( + _( + 'Error parsing configuration file for domain: %(domain)s, ' + 'file: %(file)s.' 
+ ) + % {'domain': domain_name, 'file': file_name} + ) return False try: for group in sections: for option in sections[group]: sections[group][option] = sections[group][option][0] - self.domain_config_manager.create_config(domain_ref['id'], - sections) + self.domain_config_manager.create_config( + domain_ref['id'], sections + ) return True except Exception as e: - msg = ('Error processing config file for domain: ' - '%(domain_name)s, file: %(filename)s, error: %(error)s') - LOG.error(msg, - {'domain_name': domain_name, - 'filename': file_name, - 'error': e}, - exc_info=True) + msg = ( + 'Error processing config file for domain: ' + '%(domain_name)s, file: %(filename)s, error: %(error)s' + ) + LOG.error( + msg, + { + 'domain_name': domain_name, + 'filename': file_name, + 'error': e, + }, + exc_info=True, + ) return False def read_domain_configs_from_files(self): @@ -1108,7 +1274,8 @@ class DomainConfigUploadFiles(object): # Request is to upload the configs for just one domain fname = DOMAIN_CONF_FHEAD + domain_name + DOMAIN_CONF_FTAIL if not self._upload_config_to_database( - os.path.join(conf_dir, fname), domain_name): + os.path.join(conf_dir, fname), domain_name + ): return False return True @@ -1117,8 +1284,7 @@ class DomainConfigUploadFiles(object): for filename, domain_name in self._domain_config_finder(conf_dir): if self._upload_config_to_database(filename, domain_name): success_cnt += 1 - LOG.info('Successfully uploaded domain config %r', - filename) + LOG.info('Successfully uploaded domain config %r', filename) else: failure_cnt += 1 @@ -1136,8 +1302,12 @@ class DomainConfigUploadFiles(object): except Exception: # It is likely that there is some SQL or other backend error # related to set up - print(_('Unable to access the keystone database, please check it ' - 'is configured correctly.')) + print( + _( + 'Unable to access the keystone database, please check it ' + 'is configured correctly.' 
+ ) + ) raise if not self.valid_options(): @@ -1155,16 +1325,23 @@ class DomainConfigUpload(BaseApp): @classmethod def add_argument_parser(cls, subparsers): parser = super(DomainConfigUpload, cls).add_argument_parser(subparsers) - parser.add_argument('--all', default=False, action='store_true', - help='Upload contents of all domain specific ' - 'configuration files. Either use this option ' - 'or use the --domain-name option to choose a ' - 'specific domain.') - parser.add_argument('--domain-name', default=None, - help='Upload contents of the specific ' - 'configuration file for the given domain. ' - 'Either use this option or use the --all ' - 'option to upload contents for all domains.') + parser.add_argument( + '--all', + default=False, + action='store_true', + help='Upload contents of all domain specific ' + 'configuration files. Either use this option ' + 'or use the --domain-name option to choose a ' + 'specific domain.', + ) + parser.add_argument( + '--domain-name', + default=None, + help='Upload contents of the specific ' + 'configuration file for the given domain. 
' + 'Either use this option or use the --all ' + 'option to upload contents for all domains.', + ) return parser @staticmethod @@ -1205,8 +1382,10 @@ class MappingEngineTester(BaseApp): with open(path, "rb") as file: self.rules = jsonutils.load(file) except ValueError as e: - raise SystemExit(_('Error while parsing rules ' - '%(path)s: %(err)s') % {'path': path, 'err': e}) + raise SystemExit( + _('Error while parsing rules ' '%(path)s: %(err)s') + % {'path': path, 'err': e} + ) def read_assertion(self, path): self.assertion_pathname = path @@ -1214,8 +1393,10 @@ class MappingEngineTester(BaseApp): with open(path) as file: self.assertion = file.read().strip() except IOError as e: - raise SystemExit(_("Error while opening file " - "%(path)s: %(err)s") % {'path': path, 'err': e}) + raise SystemExit( + _("Error while opening file " "%(path)s: %(err)s") + % {'path': path, 'err': e} + ) LOG.debug("Assertions loaded: [%s].", self.assertion) @@ -1225,12 +1406,20 @@ class MappingEngineTester(BaseApp): k, v = line.split(':', 1) return k.strip(), v.strip() except ValueError: - msg = _("assertion file %(pathname)s at line %(line_num)d " - "expected 'key: value' but found '%(line)s' " - "see help for file format") - raise SystemExit(msg % {'pathname': self.assertion_pathname, - 'line_num': line_num, - 'line': line}) + msg = _( + "assertion file %(pathname)s at line %(line_num)d " + "expected 'key: value' but found '%(line)s' " + "see help for file format" + ) + raise SystemExit( + msg + % { + 'pathname': self.assertion_pathname, + 'line_num': line_num, + 'line': line, + } + ) + assertion = self.assertion.splitlines() assertion_dict = {} prefix = CONF.command.prefix @@ -1267,16 +1456,19 @@ class MappingEngineTester(BaseApp): attribute_mapping = tester.rules.copy() if CONF.command.mapping_schema_version: - attribute_mapping[ - 'schema_version'] = CONF.command.mapping_schema_version + attribute_mapping['schema_version'] = ( + CONF.command.mapping_schema_version + ) if not 
attribute_mapping.get('schema_version'): default_schema_version = '1.0' - LOG.warning('No schema version defined in rules [%s]. Therefore,' - 'we will use the default as [%s].', attribute_mapping, - default_schema_version) - attribute_mapping[ - 'schema_version'] = default_schema_version + LOG.warning( + 'No schema version defined in rules [%s]. Therefore,' + 'we will use the default as [%s].', + attribute_mapping, + default_schema_version, + ) + attribute_mapping['schema_version'] = default_schema_version LOG.info("Validating Attribute mapping rules [%s].", attribute_mapping) mapping_engine.validate_mapping_structure(attribute_mapping) @@ -1286,13 +1478,17 @@ class MappingEngineTester(BaseApp): tester.normalize_assertion() if CONF.command.engine_debug: - print("Using Rules:\n%s" % ( - jsonutils.dumps(tester.rules, indent=2))) - print("Using Assertion:\n%s" % ( - jsonutils.dumps(tester.assertion, indent=2))) + print( + "Using Rules:\n%s" % (jsonutils.dumps(tester.rules, indent=2)) + ) + print( + "Using Assertion:\n%s" + % (jsonutils.dumps(tester.assertion, indent=2)) + ) - rp = mapping_engine.RuleProcessor(tester.mapping_id, - tester.rules['rules']) + rp = mapping_engine.RuleProcessor( + tester.mapping_id, tester.rules['rules'] + ) mapped = rp.process(tester.assertion) LOG.info("Result of the attribute mapping processing.") @@ -1300,41 +1496,66 @@ class MappingEngineTester(BaseApp): @classmethod def add_argument_parser(cls, subparsers): - parser = super(MappingEngineTester, - cls).add_argument_parser(subparsers) + parser = super(MappingEngineTester, cls).add_argument_parser( + subparsers + ) parser.formatter_class = argparse.RawTextHelpFormatter - parser.add_argument('--rules', default=None, required=True, - help=("Path to the file with " - "rules to be executed. 
" - "Content must be\na proper JSON structure, " - "with a top-level key 'rules' and\n" - "corresponding value being a list.")) - parser.add_argument('--input', default=None, required=True, - help=("Path to the file with input attributes. " - "The content\nconsists of ':' separated " - "parameter names and their values.\nThere " - "is only one key-value pair per line. " - "A ';' in the\nvalue is a separator and " - "then a value is treated as a list.\n" - "Example:\n" - "\tEMAIL: me@example.com\n" - "\tLOGIN: me\n" - "\tGROUPS: group1;group2;group3")) - parser.add_argument('--prefix', default=None, - help=("A prefix used for each environment " - "variable in the\nassertion. For example, " - "all environment variables may have\nthe " - "prefix ASDF_.")) - parser.add_argument('--engine-debug', - default=False, action="store_true", - help=("Enable debug messages from the mapping " - "engine.")) - parser.add_argument('--mapping-schema-version', default=None, - required=False, - help=("The override for the schema version of " - "the rules that are loaded in the 'rules' " - "option of the test CLI.")) + parser.add_argument( + '--rules', + default=None, + required=True, + help=( + "Path to the file with " + "rules to be executed. " + "Content must be\na proper JSON structure, " + "with a top-level key 'rules' and\n" + "corresponding value being a list." + ), + ) + parser.add_argument( + '--input', + default=None, + required=True, + help=( + "Path to the file with input attributes. " + "The content\nconsists of ':' separated " + "parameter names and their values.\nThere " + "is only one key-value pair per line. " + "A ';' in the\nvalue is a separator and " + "then a value is treated as a list.\n" + "Example:\n" + "\tEMAIL: me@example.com\n" + "\tLOGIN: me\n" + "\tGROUPS: group1;group2;group3" + ), + ) + parser.add_argument( + '--prefix', + default=None, + help=( + "A prefix used for each environment " + "variable in the\nassertion. 
For example, " + "all environment variables may have\nthe " + "prefix ASDF_." + ), + ) + parser.add_argument( + '--engine-debug', + default=False, + action="store_true", + help=("Enable debug messages from the mapping " "engine."), + ) + parser.add_argument( + '--mapping-schema-version', + default=None, + required=False, + help=( + "The override for the schema version of " + "the rules that are loaded in the 'rules' " + "option of the test CLI." + ), + ) class MappingPopulate(BaseApp): @@ -1358,12 +1579,17 @@ class MappingPopulate(BaseApp): @classmethod def add_argument_parser(cls, subparsers): - parser = super(MappingPopulate, cls).add_argument_parser( - subparsers) + parser = super(MappingPopulate, cls).add_argument_parser(subparsers) - parser.add_argument('--domain-name', default=None, required=True, - help=("Name of the domain configured to use " - "domain-specific backend")) + parser.add_argument( + '--domain-name', + default=None, + required=True, + help=( + "Name of the domain configured to use " + "domain-specific backend" + ), + ) return parser @classmethod @@ -1374,8 +1600,9 @@ class MappingPopulate(BaseApp): try: domain_id = cls.resource_api.get_domain_by_name(domain_name)['id'] except exception.DomainNotFound: - print(_('Invalid domain name: %(domain)s') % { - 'domain': domain_name}) + print( + _('Invalid domain name: %(domain)s') % {'domain': domain_name} + ) return False # We don't actually need to tackle id_mapping_api in order to get # entries there, because list_users does this anyway. 
That's why it @@ -1405,7 +1632,7 @@ CMDS = [ TokenRotate, TokenSetup, TrustFlush, - UserSetup + UserSetup, ] @@ -1414,10 +1641,12 @@ def add_command_parsers(subparsers): cmd.add_argument_parser(subparsers) -command_opt = cfg.SubCommandOpt('command', - title='Commands', - help='Available commands', - handler=add_command_parsers) +command_opt = cfg.SubCommandOpt( + 'command', + title='Commands', + help='Available commands', + handler=add_command_parsers, +) def main(argv=None, developer_config_file=None): @@ -1451,11 +1680,13 @@ def main(argv=None, developer_config_file=None): # and is keystone specific. Only pass a list of arguments so that # oslo.config can determine configuration file locations based on user # provided arguments, if present. - CONF(args=argv[1:], - project='keystone', - version=pbr.version.VersionInfo('keystone').version_string(), - usage='%(prog)s [' + '|'.join([cmd.name for cmd in CMDS]) + ']', - default_config_files=developer_config_file) + CONF( + args=argv[1:], + project='keystone', + version=pbr.version.VersionInfo('keystone').version_string(), + usage='%(prog)s [' + '|'.join([cmd.name for cmd in CMDS]) + ']', + default_config_files=developer_config_file, + ) if not CONF.default_config_files and not user_supplied_config_file: LOG.warning('Config file not found, using default configs.') diff --git a/keystone/cmd/doctor/__init__.py b/keystone/cmd/doctor/__init__.py index 7aa62e48a5..0f30f2d7ba 100644 --- a/keystone/cmd/doctor/__init__.py +++ b/keystone/cmd/doctor/__init__.py @@ -35,7 +35,8 @@ SYMPTOM_MODULES = [ ldap, security_compliance, tokens, - tokens_fernet] + tokens_fernet, +] def diagnose(): @@ -50,8 +51,9 @@ def diagnose(): # Some symptoms may take a long time to check, so let's keep # curious users posted on our progress as we go. print( - 'Checking for %s...' % - symptom.__name__[len(SYMPTOM_PREFIX):].replace('_', ' ')) + 'Checking for %s...' 
+ % symptom.__name__[len(SYMPTOM_PREFIX) :].replace('_', ' ') + ) # All symptoms are just callables that return true when they match the # condition that they're looking for. When that happens, we need to @@ -64,7 +66,9 @@ def diagnose(): # passing a string here. Also, we include a line break here to # visually separate the symptom's description from any other # checks -- it provides a better user experience. - print(_('\nWARNING: %s') % _(symptom.__doc__)) # noqa: See comment above. + print( + _('\nWARNING: %s') % _(symptom.__doc__) + ) # noqa: See comment above. return symptoms_found diff --git a/keystone/cmd/doctor/caching.py b/keystone/cmd/doctor/caching.py index c0e9590205..a5dcd7a919 100644 --- a/keystone/cmd/doctor/caching.py +++ b/keystone/cmd/doctor/caching.py @@ -44,10 +44,7 @@ def symptom_connection_to_memcached(): as dead. Please ensure `keystone.conf [cache] memcache_servers` is configured properly. """ - memcached_drivers = [ - 'dogpile.cache.memcached', - 'oslo_cache.memcache_pool' - ] + memcached_drivers = ['dogpile.cache.memcached', 'oslo_cache.memcache_pool'] if CONF.cache.enabled and CONF.cache.backend in memcached_drivers: cache.configure_cache() cache_stats = cache.CACHE_REGION.actual_backend.client.get_stats() diff --git a/keystone/cmd/doctor/credential.py b/keystone/cmd/doctor/credential.py index 54b11ede4c..8cae322acd 100644 --- a/keystone/cmd/doctor/credential.py +++ b/keystone/cmd/doctor/credential.py @@ -35,9 +35,7 @@ def symptom_unique_key_repositories(): Ensure `keystone.conf [credential] key_repository` and `keystone.conf [fernet_tokens] key_repository` are not pointing to the same location. 
""" - return ( - CONF.credential.key_repository == CONF.fernet_tokens.key_repository - ) + return CONF.credential.key_repository == CONF.fernet_tokens.key_repository def symptom_usability_of_credential_fernet_key_repository(): @@ -50,11 +48,12 @@ def symptom_usability_of_credential_fernet_key_repository(): fernet_utils = utils.FernetUtils( CONF.credential.key_repository, credential_fernet.MAX_ACTIVE_KEYS, - 'credential' + 'credential', ) return ( 'fernet' in CONF.credential.provider - and not fernet_utils.validate_key_repository()) + and not fernet_utils.validate_key_repository() + ) def symptom_keys_in_credential_fernet_key_repository(): @@ -68,8 +67,8 @@ def symptom_keys_in_credential_fernet_key_repository(): fernet_utils = utils.FernetUtils( CONF.credential.key_repository, credential_fernet.MAX_ACTIVE_KEYS, - 'credential' + 'credential', ) return ( - 'fernet' in CONF.credential.provider - and not fernet_utils.load_keys()) + 'fernet' in CONF.credential.provider and not fernet_utils.load_keys() + ) diff --git a/keystone/cmd/doctor/database.py b/keystone/cmd/doctor/database.py index 95c5bdd870..00a9035ebd 100644 --- a/keystone/cmd/doctor/database.py +++ b/keystone/cmd/doctor/database.py @@ -26,4 +26,5 @@ def symptom_database_connection_is_not_SQLite(): """ # noqa: D403 return ( CONF.database.connection is not None - and 'sqlite' in CONF.database.connection) + and 'sqlite' in CONF.database.connection + ) diff --git a/keystone/cmd/doctor/ldap.py b/keystone/cmd/doctor/ldap.py index cc1bd8a706..3091737f77 100644 --- a/keystone/cmd/doctor/ldap.py +++ b/keystone/cmd/doctor/ldap.py @@ -30,7 +30,8 @@ def symptom_LDAP_user_enabled_emulation_dn_ignored(): """ return ( not CONF.ldap.user_enabled_emulation - and CONF.ldap.user_enabled_emulation_dn is not None) + and CONF.ldap.user_enabled_emulation_dn is not None + ) def symptom_LDAP_user_enabled_emulation_use_group_config_ignored(): @@ -41,7 +42,8 @@ def symptom_LDAP_user_enabled_emulation_use_group_config_ignored(): """ 
return ( not CONF.ldap.user_enabled_emulation - and CONF.ldap.user_enabled_emulation_use_group_config) + and CONF.ldap.user_enabled_emulation_use_group_config + ) def symptom_LDAP_group_members_are_ids_disabled(): @@ -55,7 +57,8 @@ def symptom_LDAP_group_members_are_ids_disabled(): """ return ( CONF.ldap.group_objectclass == 'posixGroup' - and not CONF.ldap.group_members_are_ids) + and not CONF.ldap.group_members_are_ids + ) def symptom_LDAP_file_based_domain_specific_configs(): @@ -69,8 +72,10 @@ def symptom_LDAP_file_based_domain_specific_configs(): `keystone.conf [identity] domain_configurations_from_database` being set to `false`. """ - if (not CONF.identity.domain_specific_drivers_enabled or - CONF.identity.domain_configurations_from_database): + if ( + not CONF.identity.domain_specific_drivers_enabled + or CONF.identity.domain_configurations_from_database + ): return False invalid_files = [] @@ -81,10 +86,12 @@ def symptom_LDAP_file_based_domain_specific_configs(): invalid_files.append(filename) if invalid_files: invalid_str = ', '.join(invalid_files) - print('Warning: The following non-config files were found: %s\n' - 'If they are intended to be config files then rename them ' - 'to the form of `keystone..conf`. ' - 'Otherwise, ignore this warning' % invalid_str) + print( + 'Warning: The following non-config files were found: %s\n' + 'If they are intended to be config files then rename them ' + 'to the form of `keystone..conf`. ' + 'Otherwise, ignore this warning' % invalid_str + ) return True else: print('Could not find directory ', filedir) @@ -122,9 +129,11 @@ def symptom_LDAP_file_based_domain_specific_configs_formatted_correctly(): # there is no point in continuing with this check. # symptom_LDAP_file_based_domain_specific_config will catch and # report this issue. 
- if (not CONF.identity.domain_specific_drivers_enabled or - CONF.identity.domain_configurations_from_database or - not os.path.isdir(filedir)): + if ( + not CONF.identity.domain_specific_drivers_enabled + or CONF.identity.domain_configurations_from_database + or not os.path.isdir(filedir) + ): return False invalid_files = [] @@ -138,8 +147,10 @@ def symptom_LDAP_file_based_domain_specific_configs_formatted_correctly(): if invalid_files: invalid_str = ', '.join(invalid_files) - print('Error: The following config files are formatted incorrectly: ', - invalid_str) + print( + 'Error: The following config files are formatted incorrectly: ', + invalid_str, + ) return True return False diff --git a/keystone/cmd/doctor/security_compliance.py b/keystone/cmd/doctor/security_compliance.py index bbed10d2f8..c23af877c5 100644 --- a/keystone/cmd/doctor/security_compliance.py +++ b/keystone/cmd/doctor/security_compliance.py @@ -60,5 +60,7 @@ def symptom_password_regular_expression_description_not_set(): Ensure `[security_compliance] password_regex_description` is set with a description of your password regular expression in a language for humans. """ - return (CONF.security_compliance.password_regex and not - CONF.security_compliance.password_regex_description) + return ( + CONF.security_compliance.password_regex + and not CONF.security_compliance.password_regex_description + ) diff --git a/keystone/cmd/doctor/tokens.py b/keystone/cmd/doctor/tokens.py index 8bd03c2b81..a098653d08 100644 --- a/keystone/cmd/doctor/tokens.py +++ b/keystone/cmd/doctor/tokens.py @@ -32,4 +32,4 @@ def symptom_unreasonable_max_token_size(): depending on the IDs returned from LDAP, resulting in longer Fernet tokens (adjust your `max_token_size` accordingly). 
""" - return ('fernet' in CONF.token.provider and CONF.max_token_size > 255) + return 'fernet' in CONF.token.provider and CONF.max_token_size > 255 diff --git a/keystone/cmd/doctor/tokens_fernet.py b/keystone/cmd/doctor/tokens_fernet.py index e0e7a5bdd3..ae27cfd6d8 100644 --- a/keystone/cmd/doctor/tokens_fernet.py +++ b/keystone/cmd/doctor/tokens_fernet.py @@ -28,11 +28,12 @@ def symptom_usability_of_Fernet_key_repository(): fernet_utils = utils.FernetUtils( CONF.fernet_tokens.key_repository, CONF.fernet_tokens.max_active_keys, - 'fernet_tokens' + 'fernet_tokens', ) return ( 'fernet' in CONF.token.provider - and not fernet_utils.validate_key_repository()) + and not fernet_utils.validate_key_repository() + ) def symptom_keys_in_Fernet_key_repository(): @@ -46,8 +47,6 @@ def symptom_keys_in_Fernet_key_repository(): fernet_utils = utils.FernetUtils( CONF.fernet_tokens.key_repository, CONF.fernet_tokens.max_active_keys, - 'fernet_tokens' + 'fernet_tokens', ) - return ( - 'fernet' in CONF.token.provider - and not fernet_utils.load_keys()) + return 'fernet' in CONF.token.provider and not fernet_utils.load_keys() diff --git a/keystone/cmd/idutils.py b/keystone/cmd/idutils.py index e878063631..dd09e18f63 100644 --- a/keystone/cmd/idutils.py +++ b/keystone/cmd/idutils.py @@ -51,13 +51,15 @@ class Identity(object): 'id': project_id, 'domain_id': self.default_domain_id, 'description': 'Bootstrap project for initializing the cloud.', - 'name': self.project_name + 'name': self.project_name, } PROVIDERS.resource_api.create_project(project_id, project) LOG.info('Created project %s', self.project_name) except exception.Conflict: - LOG.info('Project %s already exists, skipping creation.', - self.project_name) + LOG.info( + 'Project %s already exists, skipping creation.', + self.project_name, + ) project = PROVIDERS.resource_api.get_project_by_name( self.project_name, self.default_domain_id ) @@ -75,7 +77,8 @@ class Identity(object): PROVIDERS.resource_api.get_domain(domain_id) 
_self._assert_default_project_id_is_not_domain( - user_ref.get('default_project_id')) + user_ref.get('default_project_id') + ) # For creating a user, the domain is in the object itself domain_id = user_ref['domain_id'] @@ -88,7 +91,8 @@ class Identity(object): ref = _self._create_user_with_federated_objects(user, driver) notifications.Audit.created(_self._USER, user['id'], initiator) return _self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) + ref, domain_id, driver, mapping.EntityType.USER + ) def user_setup(self): # NOTE(morganfainberg): Do not create the user if it already exists. @@ -96,12 +100,15 @@ class Identity(object): user = PROVIDERS.identity_api.get_user_by_name( self.user_name, self.default_domain_id ) - LOG.info('User %s already exists, skipping creation.', - self.user_name) + LOG.info( + 'User %s already exists, skipping creation.', self.user_name + ) if self.user_id is not None and user['id'] != self.user_id: - msg = (f'user `{self.user_name}` already exists ' - f'with `{self.user_id}`') + msg = ( + f'user `{self.user_name}` already exists ' + f'with `{self.user_id}`' + ) raise exception.Conflict(type='user_id', details=msg) # If the user is not enabled, re-enable them. This also helps @@ -128,9 +135,7 @@ class Identity(object): # or the user was previously disabled. This allows bootstrap to act # as a recovery tool, without having to create a new user. 
if update: - user = PROVIDERS.identity_api.update_user( - user['id'], update - ) + user = PROVIDERS.identity_api.update_user(user['id'], update) LOG.info('Reset password for user %s.', self.user_name) if not enabled and user['enabled']: # Although we always try to enable the user, this log @@ -143,7 +148,7 @@ class Identity(object): 'name': self.user_name, 'enabled': True, 'domain_id': self.default_domain_id, - 'password': self.user_password + 'password': self.user_password, } ) LOG.info('Created user %s', self.user_name) diff --git a/keystone/cmd/manage.py b/keystone/cmd/manage.py index 89498c84cf..0d523d0f66 100644 --- a/keystone/cmd/manage.py +++ b/keystone/cmd/manage.py @@ -22,13 +22,10 @@ from keystone.cmd import cli # If ../../keystone/__init__.py exists, add ../../ to Python search path, so # that it will override what happens to be installed in # /usr/(local/)lib/python... -possible_topdir = os.path.normpath(os.path.join(os.path.abspath(__file__), - os.pardir, - os.pardir, - os.pardir)) -if os.path.exists(os.path.join(possible_topdir, - 'keystone', - '__init__.py')): +possible_topdir = os.path.normpath( + os.path.join(os.path.abspath(__file__), os.pardir, os.pardir, os.pardir) +) +if os.path.exists(os.path.join(possible_topdir, 'keystone', '__init__.py')): sys.path.insert(0, possible_topdir) diff --git a/keystone/cmd/status.py b/keystone/cmd/status.py index 667840ca30..d21af98ce6 100644 --- a/keystone/cmd/status.py +++ b/keystone/cmd/status.py @@ -44,7 +44,7 @@ class Checks(upgradecheck.UpgradeCommands): 'identity:delete_trust', 'identity:get_trust', 'identity:list_roles_for_trust', - 'identity:get_role_for_trust' + 'identity:get_role_for_trust', ] failed_rules = [] for rule in rules: @@ -59,17 +59,22 @@ class Checks(upgradecheck.UpgradeCommands): "these rules to be fully permissive as hardcoded enforcement " "will be removed. 
To correct this issue, either stop " "overriding these rules in config to accept the defaults, or " - "explicitly set check strings that are not empty." % - "\", \"".join(failed_rules) + "explicitly set check strings that are not empty." + % "\", \"".join(failed_rules), ) return upgradecheck.Result( - upgradecheck.Code.SUCCESS, 'Trust policies are safe.') + upgradecheck.Code.SUCCESS, 'Trust policies are safe.' + ) def check_default_roles_are_immutable(self): hints = driver_hints.Hints() hints.add_filter('domain_id', None) # Only check global roles roles = PROVIDERS.role_api.list_roles(hints=hints) - default_roles = ('admin', 'member', 'reader',) + default_roles = ( + 'admin', + 'member', + 'reader', + ) failed_roles = [] for role in [r for r in roles if r['name'] in default_roles]: if not role.get('options', {}).get('immutable'): @@ -77,18 +82,25 @@ class Checks(upgradecheck.UpgradeCommands): if any(failed_roles): return upgradecheck.Result( upgradecheck.Code.FAILURE, - "Roles are not immutable: %s" % ", ".join(failed_roles) + "Roles are not immutable: %s" % ", ".join(failed_roles), ) return upgradecheck.Result( - upgradecheck.Code.SUCCESS, "Default roles are immutable.") + upgradecheck.Code.SUCCESS, "Default roles are immutable." 
+ ) _upgrade_checks = ( - ("Check trust policies are not empty", - check_trust_policies_are_not_empty), - ("Check default roles are immutable", - check_default_roles_are_immutable), - ("Policy File JSON to YAML Migration", - (common_checks.check_policy_json, {'conf': CONF})), + ( + "Check trust policies are not empty", + check_trust_policies_are_not_empty, + ), + ( + "Check default roles are immutable", + check_default_roles_are_immutable, + ), + ( + "Policy File JSON to YAML Migration", + (common_checks.check_policy_json, {'conf': CONF}), + ), ) diff --git a/keystone/common/cache/_context_cache.py b/keystone/common/cache/_context_cache.py index 8c526065c5..e17464e87a 100644 --- a/keystone/common/cache/_context_cache.py +++ b/keystone/common/cache/_context_cache.py @@ -55,8 +55,9 @@ class _ResponseCacheProxy(proxy.ProxyBackend): return api.NO_VALUE value = msgpackutils.loads(value) - return api.CachedValue(payload=value['payload'], - metadata=value['metadata']) + return api.CachedValue( + payload=value['payload'], metadata=value['metadata'] + ) def _delete_local_cache(self, key): # On invalidate/delete remove the value from the local request cache @@ -91,8 +92,9 @@ class _ResponseCacheProxy(proxy.ProxyBackend): if v is not api.NO_VALUE: values[key] = v query_keys = set(keys).difference(set(values.keys())) - values.update(dict( - zip(query_keys, self.proxied.get_multi(query_keys)))) + values.update( + dict(zip(query_keys, self.proxied.get_multi(query_keys))) + ) return [values[k] for k in keys] def set_multi(self, mapping): diff --git a/keystone/common/cache/core.py b/keystone/common/cache/core.py index fb9fc1ca85..6a67ba824a 100644 --- a/keystone/common/cache/core.py +++ b/keystone/common/cache/core.py @@ -41,7 +41,8 @@ class RegionInvalidationManager(object): @property def region_id(self): return self._invalidation_region.get_or_create( - self._region_key, self._generate_new_id, expiration_time=-1) + self._region_key, self._generate_new_id, expiration_time=-1 + ) 
def invalidate_region(self): new_region_id = self._generate_new_id() @@ -87,6 +88,7 @@ def key_mangler_factory(invalidation_manager, orig_key_mangler): if orig_key_mangler: key = orig_key_mangler(key) return key + return key_mangler @@ -127,11 +129,14 @@ def configure_cache(region=None): region.wrap(_context_cache._ResponseCacheProxy) region_manager = RegionInvalidationManager( - CACHE_INVALIDATION_REGION, region.name) + CACHE_INVALIDATION_REGION, region.name + ) region.key_mangler = key_mangler_factory( - region_manager, region.key_mangler) + region_manager, region.key_mangler + ) region.region_invalidator = DistributedInvalidationStrategy( - region_manager) + region_manager + ) def _sha1_mangle_key(key): @@ -161,7 +166,8 @@ def configure_invalidation_region(): config_dict['expiration_time'] = None # we don't want an expiration CACHE_INVALIDATION_REGION.configure_from_config( - config_dict, '%s.' % CONF.cache.config_prefix) + config_dict, '%s.' % CONF.cache.config_prefix + ) # NOTE(breton): Wrap the cache invalidation region to avoid excessive # calls to memcached, which would result in poor performance. 
@@ -179,5 +185,6 @@ def configure_invalidation_region(): def get_memoization_decorator(group, expiration_group=None, region=None): if region is None: region = CACHE_REGION - return cache.get_memoization_decorator(CONF, region, group, - expiration_group=expiration_group) + return cache.get_memoization_decorator( + CONF, region, group, expiration_group=expiration_group + ) diff --git a/keystone/common/context.py b/keystone/common/context.py index 441e6a6f76..a4694c6ba4 100644 --- a/keystone/common/context.py +++ b/keystone/common/context.py @@ -17,8 +17,9 @@ REQUEST_CONTEXT_ENV = 'keystone.oslo_request_context' def _prop(name): - return property(lambda x: getattr(x, name), - lambda x, y: setattr(x, name, y)) + return property( + lambda x: getattr(x, name), lambda x, y: setattr(x, name, y) + ) class RequestContext(oslo_context.RequestContext): diff --git a/keystone/common/driver_hints.py b/keystone/common/driver_hints.py index a79dddc9f3..54903d45ce 100644 --- a/keystone/common/driver_hints.py +++ b/keystone/common/driver_hints.py @@ -31,12 +31,16 @@ def truncated(f): 'truncated' boolean to 'true' in the hints limit dict. 
""" + @functools.wraps(f) def wrapper(self, hints, *args, **kwargs): if not hasattr(hints, 'limit'): raise exception.UnexpectedError( - _('Cannot truncate a driver call without hints list as ' - 'first parameter after self ')) + _( + 'Cannot truncate a driver call without hints list as ' + 'first parameter after self ' + ) + ) if hints.limit is None or hints.filters: return f(self, hints, *args, **kwargs) @@ -55,6 +59,7 @@ def truncated(f): else: hints.set_limit(list_limit) return ref_list + return wrapper @@ -94,17 +99,23 @@ class Hints(object): self.filters = list() self.cannot_match = False - def add_filter(self, name, value, comparator='equals', - case_sensitive=False): + def add_filter( + self, name, value, comparator='equals', case_sensitive=False + ): """Add a filter to the filters list, which is publicly accessible.""" - self.filters.append({'name': name, 'value': value, - 'comparator': comparator, - 'case_sensitive': case_sensitive}) + self.filters.append( + { + 'name': name, + 'value': value, + 'comparator': comparator, + 'case_sensitive': case_sensitive, + } + ) def get_exact_filter_by_name(self, name): """Return a filter key and value if exact filter exists for name.""" for entry in self.filters: - if (entry['name'] == name and entry['comparator'] == 'equals'): + if entry['name'] == name and entry['comparator'] == 'equals': return entry def set_limit(self, limit, truncated=False): diff --git a/keystone/common/fernet_utils.py b/keystone/common/fernet_utils.py index cb28f332fd..913fcfe32b 100644 --- a/keystone/common/fernet_utils.py +++ b/keystone/common/fernet_utils.py @@ -36,8 +36,7 @@ NULL_KEY = base64.urlsafe_b64encode(b'\x00' * 32) class FernetUtils(object): - def __init__(self, key_repository, max_active_keys, - config_group): + def __init__(self, key_repository, max_active_keys, config_group): self.key_repository = key_repository self.max_active_keys = max_active_keys self.config_group = config_group @@ -48,36 +47,43 @@ class FernetUtils(object): # 
passed in as None because we don't set allow_no_values to True. # ensure current user has sufficient access to the key repository - is_valid = (os.access(self.key_repository, os.R_OK) and - os.access(self.key_repository, os.X_OK)) + is_valid = os.access(self.key_repository, os.R_OK) and os.access( + self.key_repository, os.X_OK + ) if requires_write: - is_valid = (is_valid and - os.access(self.key_repository, os.W_OK)) + is_valid = is_valid and os.access(self.key_repository, os.W_OK) if not is_valid: LOG.error( 'Either [%(config_group)s] key_repository does not exist ' 'or Keystone does not have sufficient permission to ' 'access it: %(key_repo)s', - {'key_repo': self.key_repository, - 'config_group': self.config_group}) + { + 'key_repo': self.key_repository, + 'config_group': self.config_group, + }, + ) else: # ensure the key repository isn't world-readable stat_info = os.stat(self.key_repository) - if (stat_info.st_mode & stat.S_IROTH or - stat_info.st_mode & stat.S_IXOTH): + if ( + stat_info.st_mode & stat.S_IROTH + or stat_info.st_mode & stat.S_IXOTH + ): LOG.warning( - 'key_repository is world readable: %s', - self.key_repository) + 'key_repository is world readable: %s', self.key_repository + ) return is_valid - def create_key_directory(self, keystone_user_id=None, - keystone_group_id=None): + def create_key_directory( + self, keystone_user_id=None, keystone_group_id=None + ): """Attempt to create the key directory if it doesn't exist.""" utils.create_directory( - self.key_repository, keystone_user_id=keystone_user_id, - keystone_group_id=keystone_group_id + self.key_repository, + keystone_user_id=keystone_user_id, + keystone_group_id=keystone_group_id, ) def _create_new_key(self, keystone_user_id, keystone_group_id): @@ -110,7 +116,9 @@ class FernetUtils(object): LOG.warning( 'Unable to change the ownership of the new key without a ' 'keystone user ID and keystone group ID both being provided: ' - '%s', self.key_repository) + '%s', + self.key_repository, + ) 
# Determine the file name of the new key key_file = os.path.join(self.key_repository, '0.tmp') create_success = False @@ -163,15 +171,19 @@ class FernetUtils(object): else: key = key_file.read() if len(key) == 0: - LOG.warning('Ignoring empty key found in key ' - 'repository: %s', path) + LOG.warning( + 'Ignoring empty key found in key ' + 'repository: %s', + path, + ) continue key_files[key_id] = path keys[key_id] = key return key_files, keys - def initialize_key_repository(self, keystone_user_id=None, - keystone_group_id=None): + def initialize_key_repository( + self, keystone_user_id=None, keystone_group_id=None + ): """Create a key repository and bootstrap it with a key. :param keystone_user_id: User ID of the Keystone user. @@ -179,8 +191,7 @@ class FernetUtils(object): """ # make sure we have work to do before proceeding - if os.access(os.path.join(self.key_repository, '0'), - os.F_OK): + if os.access(os.path.join(self.key_repository, '0'), os.F_OK): LOG.info('Key repository is already initialized; aborting.') return @@ -213,10 +224,10 @@ class FernetUtils(object): # read the list of key files key_files, _ = self._get_key_files(self.key_repository) - LOG.info('Starting key rotation with %(count)s key files: ' - '%(list)s', { - 'count': len(key_files), - 'list': list(key_files.values())}) + LOG.info( + 'Starting key rotation with %(count)s key files: ' '%(list)s', + {'count': len(key_files), 'list': list(key_files.values())}, + ) # add a tmp new key to the rotation, which will be the *next* primary self._create_tmp_new_key(keystone_user_id, keystone_group_id) @@ -230,12 +241,12 @@ class FernetUtils(object): # promote the next primary key to be the primary os.rename( os.path.join(self.key_repository, '0'), - os.path.join(self.key_repository, str(new_primary_key)) + os.path.join(self.key_repository, str(new_primary_key)), ) key_files.pop(0) key_files[new_primary_key] = os.path.join( - self.key_repository, - str(new_primary_key)) + self.key_repository, 
str(new_primary_key) + ) LOG.info('Promoted key 0 to be the primary: %s', new_primary_key) # rename the tmp key to the real staged key @@ -279,14 +290,20 @@ class FernetUtils(object): # sense to log this message for tokens since credentials doesn't # have a `max_active_key` configuration option. if self.key_repository == CONF.fernet_tokens.key_repository: - msg = ('Loaded %(count)d Fernet keys from %(dir)s, but ' - '`[fernet_tokens] max_active_keys = %(max)d`; perhaps ' - 'there have not been enough key rotations to reach ' - '`max_active_keys` yet?') - LOG.debug(msg, { - 'count': len(keys), - 'max': self.max_active_keys, - 'dir': self.key_repository}) + msg = ( + 'Loaded %(count)d Fernet keys from %(dir)s, but ' + '`[fernet_tokens] max_active_keys = %(max)d`; perhaps ' + 'there have not been enough key rotations to reach ' + '`max_active_keys` yet?' + ) + LOG.debug( + msg, + { + 'count': len(keys), + 'max': self.max_active_keys, + 'dir': self.key_repository, + }, + ) # return the encryption_keys, sorted by key number, descending key_list = [keys[x] for x in sorted(keys.keys(), reverse=True)] diff --git a/keystone/common/json_home.py b/keystone/common/json_home.py index ef4f61f914..ec64ae7485 100644 --- a/keystone/common/json_home.py +++ b/keystone/common/json_home.py @@ -19,27 +19,35 @@ from keystone.i18n import _ def build_v3_resource_relation(resource_name): - return ('https://docs.openstack.org/api/openstack-identity/3/rel/%s' % - resource_name) + return ( + 'https://docs.openstack.org/api/openstack-identity/3/rel/%s' + % resource_name + ) -def build_v3_extension_resource_relation(extension_name, extension_version, - resource_name): +def build_v3_extension_resource_relation( + extension_name, extension_version, resource_name +): return ( 'https://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/rel/' - '%s' % (extension_name, extension_version, resource_name)) + '%s' % (extension_name, extension_version, resource_name) + ) def 
build_v3_parameter_relation(parameter_name): - return ('https://docs.openstack.org/api/openstack-identity/3/param/%s' % - parameter_name) + return ( + 'https://docs.openstack.org/api/openstack-identity/3/param/%s' + % parameter_name + ) -def build_v3_extension_parameter_relation(extension_name, extension_version, - parameter_name): +def build_v3_extension_parameter_relation( + extension_name, extension_version, parameter_name +): return ( 'https://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/param/' - '%s' % (extension_name, extension_version, parameter_name)) + '%s' % (extension_name, extension_version, parameter_name) + ) class Parameters(object): @@ -58,9 +66,9 @@ class Parameters(object): REGISTERED_LIMIT_ID = build_v3_parameter_relation('registered_limit_id') LIMIT_ID = build_v3_parameter_relation('limit_id') APPLICATION_CRED_ID = build_v3_parameter_relation( - 'application_credential_id') - ACCESS_RULE_ID = build_v3_parameter_relation( - 'access_rule_id') + 'application_credential_id' + ) + ACCESS_RULE_ID = build_v3_parameter_relation('access_rule_id') class Status(object): @@ -81,8 +89,10 @@ class Status(object): resource_data['hints'] = {'status': status} return - raise exception.Error(message=_( - 'Unexpected status requested for JSON Home response, %s') % status) + raise exception.Error( + message=_('Unexpected status requested for JSON Home response, %s') + % status + ) class JsonHomeResources(object): diff --git a/keystone/common/jwt_utils.py b/keystone/common/jwt_utils.py index 9f0aa9b093..b250807eb6 100644 --- a/keystone/common/jwt_utils.py +++ b/keystone/common/jwt_utils.py @@ -29,7 +29,7 @@ def create_jws_keypair(private_key_path, public_key_path): private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption() + encryption_algorithm=serialization.NoEncryption(), ) ) @@ -38,6 +38,6 @@ def create_jws_keypair(private_key_path, public_key_path): 
f.write( public_key.public_bytes( encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo + format=serialization.PublicFormat.SubjectPublicKeyInfo, ) ) diff --git a/keystone/common/manager.py b/keystone/common/manager.py index 70f91f2d82..89cc272fda 100644 --- a/keystone/common/manager.py +++ b/keystone/common/manager.py @@ -51,6 +51,7 @@ def response_truncated(f): project). """ + @functools.wraps(f) def wrapper(self, *args, **kwargs): if kwargs.get('hints') is None: @@ -60,18 +61,18 @@ def response_truncated(f): if list_limit: kwargs['hints'].set_limit(list_limit) return f(self, *args, **kwargs) + return wrapper def load_driver(namespace, driver_name, *args): try: - driver_manager = stevedore.DriverManager(namespace, - driver_name, - invoke_on_load=True, - invoke_args=args) + driver_manager = stevedore.DriverManager( + namespace, driver_name, invoke_on_load=True, invoke_args=args + ) return driver_manager.driver except stevedore.exception.NoMatches: - msg = (_('Unable to find %(name)r driver in %(namespace)r.')) + msg = _('Unable to find %(name)r driver in %(namespace)r.') raise ImportError(msg % {'name': driver_name, 'namespace': namespace}) @@ -89,7 +90,7 @@ class _TraceMeta(type): __fn_info = '%(module)s.%(classname)s.%(funcname)s' % { 'module': inspect.getmodule(__f).__name__, 'classname': __classname, - 'funcname': __f.__name__ + 'funcname': __f.__name__, } # NOTE(morganfainberg): Omit "cls" and "self" when printing trace logs # the index can be calculated at wrap time rather than at runtime. 
@@ -115,35 +116,46 @@ class _TraceMeta(type): if __do_trace: __subst = { 'run_time': (time.time() - __t), - 'passed_args': ', '.join([ - ', '.join([repr(a) - for a in args[__arg_idx:]]), - ', '.join(['%(k)s=%(v)r' % {'k': k, 'v': v} - for k, v in kwargs.items()]), - ]), + 'passed_args': ', '.join( + [ + ', '.join([repr(a) for a in args[__arg_idx:]]), + ', '.join( + [ + '%(k)s=%(v)r' % {'k': k, 'v': v} + for k, v in kwargs.items() + ] + ), + ] + ), 'function': __fn_info, 'exception': __exc, 'ret_val': __ret_val, } if __exc is not None: - __msg = ('[%(run_time)ss] %(function)s ' - '(%(passed_args)s) => raised ' - '%(exception)r') + __msg = ( + '[%(run_time)ss] %(function)s ' + '(%(passed_args)s) => raised ' + '%(exception)r' + ) else: # TODO(morganfainberg): find a way to indicate if this # was a cache hit or cache miss. - __msg = ('[%(run_time)ss] %(function)s' - '(%(passed_args)s) => %(ret_val)r') + __msg = ( + '[%(run_time)ss] %(function)s' + '(%(passed_args)s) => %(ret_val)r' + ) LOG.trace(__msg, __subst) return __ret_val + return wrapped def __new__(meta, classname, bases, class_dict): final_cls_dict = {} for attr_name, attr in class_dict.items(): # NOTE(morganfainberg): only wrap public instances and methods. - if (isinstance(attr, types.FunctionType) and - not attr_name.startswith('_')): + if isinstance( + attr, types.FunctionType + ) and not attr_name.startswith('_'): attr = _TraceMeta.wrapper(attr, classname) final_cls_dict[attr_name] = attr return type.__new__(meta, classname, bases, final_cls_dict) @@ -167,16 +179,19 @@ class Manager(object, metaclass=_TraceMeta): def __init__(self, driver_name): if self._provides_api is None: - raise ValueError('Programming Error: All managers must provide an ' - 'API that can be referenced by other components ' - 'of Keystone.') + raise ValueError( + 'Programming Error: All managers must provide an ' + 'API that can be referenced by other components ' + 'of Keystone.' 
+ ) if driver_name is not None: self.driver = load_driver(self.driver_namespace, driver_name) self.__register_provider_api() def __register_provider_api(self): provider_api.ProviderAPIs._register_provider_api( - name=self._provides_api, obj=self) + name=self._provides_api, obj=self + ) def __getattr__(self, name): """Forward calls to the underlying driver. diff --git a/keystone/common/password_hashing.py b/keystone/common/password_hashing.py index ddc4094f17..d459ffed40 100644 --- a/keystone/common/password_hashing.py +++ b/keystone/common/password_hashing.py @@ -26,11 +26,15 @@ from keystone.i18n import _ CONF = keystone.conf.CONF LOG = log.getLogger(__name__) -SUPPORTED_HASHERS = frozenset([passlib.hash.bcrypt, - passlib.hash.bcrypt_sha256, - passlib.hash.scrypt, - passlib.hash.pbkdf2_sha512, - passlib.hash.sha512_crypt]) +SUPPORTED_HASHERS = frozenset( + [ + passlib.hash.bcrypt, + passlib.hash.bcrypt_sha256, + passlib.hash.scrypt, + passlib.hash.pbkdf2_sha512, + passlib.hash.sha512_crypt, + ] +) _HASHER_NAME_MAP = {hasher.name: hasher for hasher in SUPPORTED_HASHERS} @@ -55,20 +59,24 @@ def _get_hash_ident(hashers): _HASHER_IDENT_MAP = { - prefix: module for module, prefix in itertools.chain( - *[zip([mod] * len(ident), ident) - for mod, ident in _get_hash_ident(SUPPORTED_HASHERS)] + prefix: module + for module, prefix in itertools.chain( + *[ + zip([mod] * len(ident), ident) + for mod, ident in _get_hash_ident(SUPPORTED_HASHERS) + ] ) } def _get_hasher_from_ident(hashed): try: - return _HASHER_IDENT_MAP[hashed[0:hashed.index('$', 1) + 1]] + return _HASHER_IDENT_MAP[hashed[0 : hashed.index('$', 1) + 1]] except KeyError: raise ValueError( - _('Unsupported password hashing algorithm ident: %s') % - hashed[0:hashed.index('$', 1) + 1]) + _('Unsupported password hashing algorithm ident: %s') + % hashed[0 : hashed.index('$', 1) + 1] + ) def verify_length_and_trunc_password(password): @@ -84,8 +92,10 @@ def verify_length_and_trunc_password(password): # bytes are fully 
mixed. See: # https://passlib.readthedocs.io/en/stable/lib/passlib.hash.bcrypt.html#security-issues BCRYPT_MAX_LENGTH = 72 - if (CONF.identity.password_hash_algorithm == 'bcrypt' and # nosec: B105 - CONF.identity.max_password_length > BCRYPT_MAX_LENGTH): + if ( + CONF.identity.password_hash_algorithm == 'bcrypt' # nosec: B105 + and CONF.identity.max_password_length > BCRYPT_MAX_LENGTH + ): msg = "Truncating password to algorithm specific maximum length %d characters." LOG.warning(msg, BCRYPT_MAX_LENGTH) max_length = BCRYPT_MAX_LENGTH @@ -139,8 +149,9 @@ def hash_password(password): if hasher is None: raise RuntimeError( - _('Password Hash Algorithm %s not found') % - CONF.identity.password_hash_algorithm) + _('Password Hash Algorithm %s not found') + % CONF.identity.password_hash_algorithm + ) if CONF.identity.password_hash_rounds: params['rounds'] = CONF.identity.password_hash_rounds diff --git a/keystone/common/policies/access_rule.py b/keystone/common/policies/access_rule.py index bab6c7ddc4..0e8e3947eb 100644 --- a/keystone/common/policies/access_rule.py +++ b/keystone/common/policies/access_rule.py @@ -20,13 +20,11 @@ collection_path = '/v3/users/{user_id}/access_rules' resource_path = collection_path + '/{access_rule_id}' SYSTEM_READER_OR_OWNER = ( - '(' + base.SYSTEM_READER + ') or ' - 'user_id:%(target.user.id)s' + '(' + base.SYSTEM_READER + ') or ' 'user_id:%(target.user.id)s' ) SYSTEM_ADMIN_OR_OWNER = ( - '(' + base.SYSTEM_ADMIN + ') or ' - 'user_id:%(target.user.id)s' + '(' + base.SYSTEM_ADMIN + ') or ' 'user_id:%(target.user.id)s' ) access_rule_policies = [ @@ -35,26 +33,28 @@ access_rule_policies = [ check_str=SYSTEM_READER_OR_OWNER, scope_types=['system', 'project'], description='Show access rule details.', - operations=[{'path': resource_path, - 'method': 'GET'}, - {'path': resource_path, - 'method': 'HEAD'}]), + operations=[ + {'path': resource_path, 'method': 'GET'}, + {'path': resource_path, 'method': 'HEAD'}, + ], + ), 
policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_access_rules', check_str=SYSTEM_READER_OR_OWNER, scope_types=['system', 'project'], description='List access rules for a user.', - operations=[{'path': collection_path, - 'method': 'GET'}, - {'path': collection_path, - 'method': 'HEAD'}]), + operations=[ + {'path': collection_path, 'method': 'GET'}, + {'path': collection_path, 'method': 'HEAD'}, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_access_rule', check_str=SYSTEM_ADMIN_OR_OWNER, scope_types=['system', 'project'], description='Delete an access_rule.', - operations=[{'path': resource_path, - 'method': 'DELETE'}]) + operations=[{'path': resource_path, 'method': 'DELETE'}], + ), ] diff --git a/keystone/common/policies/access_token.py b/keystone/common/policies/access_token.py index 00b7e8be9b..72bbed0b2f 100644 --- a/keystone/common/policies/access_token.py +++ b/keystone/common/policies/access_token.py @@ -23,47 +23,85 @@ access_token_policies = [ # order to access these APIs. 
scope_types=['project'], description='Authorize OAUTH1 request token.', - operations=[{'path': '/v3/OS-OAUTH1/authorize/{request_token_id}', - 'method': 'PUT'}]), + operations=[ + { + 'path': '/v3/OS-OAUTH1/authorize/{request_token_id}', + 'method': 'PUT', + } + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_access_token', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['project'], description='Get OAUTH1 access token for user by access token ID.', - operations=[{'path': ('/v3/users/{user_id}/OS-OAUTH1/access_tokens/' - '{access_token_id}'), - 'method': 'GET'}]), + operations=[ + { + 'path': ( + '/v3/users/{user_id}/OS-OAUTH1/access_tokens/' + '{access_token_id}' + ), + 'method': 'GET', + } + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_access_token_role', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['project'], description='Get role for user OAUTH1 access token.', - operations=[{'path': ('/v3/users/{user_id}/OS-OAUTH1/access_tokens/' - '{access_token_id}/roles/{role_id}'), - 'method': 'GET'}]), + operations=[ + { + 'path': ( + '/v3/users/{user_id}/OS-OAUTH1/access_tokens/' + '{access_token_id}/roles/{role_id}' + ), + 'method': 'GET', + } + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_access_tokens', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['project'], description='List OAUTH1 access tokens for user.', - operations=[{'path': '/v3/users/{user_id}/OS-OAUTH1/access_tokens', - 'method': 'GET'}]), + operations=[ + { + 'path': '/v3/users/{user_id}/OS-OAUTH1/access_tokens', + 'method': 'GET', + } + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_access_token_roles', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['project'], description='List OAUTH1 access token roles.', - operations=[{'path': ('/v3/users/{user_id}/OS-OAUTH1/access_tokens/' - '{access_token_id}/roles'), - 'method': 'GET'}]), + operations=[ + { + 'path': ( + '/v3/users/{user_id}/OS-OAUTH1/access_tokens/' + 
'{access_token_id}/roles' + ), + 'method': 'GET', + } + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_access_token', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['project'], description='Delete OAUTH1 access token.', - operations=[{'path': ('/v3/users/{user_id}/OS-OAUTH1/access_tokens/' - '{access_token_id}'), - 'method': 'DELETE'}]) + operations=[ + { + 'path': ( + '/v3/users/{user_id}/OS-OAUTH1/access_tokens/' + '{access_token_id}' + ), + 'method': 'DELETE', + } + ], + ), ] diff --git a/keystone/common/policies/application_credential.py b/keystone/common/policies/application_credential.py index 12958d8cd2..f128164fd8 100644 --- a/keystone/common/policies/application_credential.py +++ b/keystone/common/policies/application_credential.py @@ -27,19 +27,19 @@ deprecated_list_application_credentials_for_user = policy.DeprecatedRule( name=base.IDENTITY % 'list_application_credentials', check_str=base.RULE_ADMIN_OR_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_get_application_credentials_for_user = policy.DeprecatedRule( name=base.IDENTITY % 'get_application_credential', check_str=base.RULE_ADMIN_OR_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_application_credentials_for_user = policy.DeprecatedRule( name=base.IDENTITY % 'delete_application_credential', check_str=base.RULE_ADMIN_OR_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -49,36 +49,38 @@ application_credential_policies = [ check_str=base.ADMIN_OR_SYSTEM_READER_OR_OWNER, scope_types=['system', 'project'], description='Show application credential details.', - operations=[{'path': resource_path, - 'method': 'GET'}, - {'path': resource_path, - 'method': 'HEAD'}], - 
deprecated_rule=deprecated_get_application_credentials_for_user), + operations=[ + {'path': resource_path, 'method': 'GET'}, + {'path': resource_path, 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_get_application_credentials_for_user, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_application_credentials', check_str=base.ADMIN_OR_SYSTEM_READER_OR_OWNER, scope_types=['system', 'project'], description='List application credentials for a user.', - operations=[{'path': collection_path, - 'method': 'GET'}, - {'path': collection_path, - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_application_credentials_for_user), + operations=[ + {'path': collection_path, 'method': 'GET'}, + {'path': collection_path, 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_application_credentials_for_user, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_application_credential', check_str=base.RULE_OWNER, scope_types=['project'], description='Create an application credential.', - operations=[{'path': collection_path, - 'method': 'POST'}]), + operations=[{'path': collection_path, 'method': 'POST'}], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_application_credential', check_str=base.RULE_ADMIN_OR_OWNER, scope_types=['system', 'project'], description='Delete an application credential.', - operations=[{'path': resource_path, - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_application_credentials_for_user) + operations=[{'path': resource_path, 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_application_credentials_for_user, + ), ] diff --git a/keystone/common/policies/auth.py b/keystone/common/policies/auth.py index 2d4742b239..aab57a823a 100644 --- a/keystone/common/policies/auth.py +++ b/keystone/common/policies/auth.py @@ -20,63 +20,41 @@ auth_policies = [ check_str='', description='Get service catalog.', operations=[ - { - 'path': '/v3/auth/catalog', - 'method': 'GET' - }, - { - 'path': 
'/v3/auth/catalog', - 'method': 'HEAD' - } - ] + {'path': '/v3/auth/catalog', 'method': 'GET'}, + {'path': '/v3/auth/catalog', 'method': 'HEAD'}, + ], ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_auth_projects', check_str='', - description=('List all projects a user has access to via role ' - 'assignments.'), + description=( + 'List all projects a user has access to via role ' 'assignments.' + ), operations=[ - { - 'path': '/v3/auth/projects', - 'method': 'GET' - }, - { - 'path': '/v3/auth/projects', - 'method': 'HEAD' - } - ] + {'path': '/v3/auth/projects', 'method': 'GET'}, + {'path': '/v3/auth/projects', 'method': 'HEAD'}, + ], ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_auth_domains', check_str='', - description=('List all domains a user has access to via role ' - 'assignments.'), + description=( + 'List all domains a user has access to via role ' 'assignments.' + ), operations=[ - { - 'path': '/v3/auth/domains', - 'method': 'GET' - }, - { - 'path': '/v3/auth/domains', - 'method': 'HEAD' - } - ] + {'path': '/v3/auth/domains', 'method': 'GET'}, + {'path': '/v3/auth/domains', 'method': 'HEAD'}, + ], ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_auth_system', check_str='', description='List systems a user has access to via role assignments.', operations=[ - { - 'path': '/v3/auth/system', - 'method': 'GET' - }, - { - 'path': '/v3/auth/system', - 'method': 'HEAD' - } - ] - ) + {'path': '/v3/auth/system', 'method': 'GET'}, + {'path': '/v3/auth/system', 'method': 'HEAD'}, + ], + ), ] diff --git a/keystone/common/policies/base.py b/keystone/common/policies/base.py index 39df9eaa28..4797d22d7a 100644 --- a/keystone/common/policies/base.py +++ b/keystone/common/policies/base.py @@ -18,17 +18,19 @@ RULE_OWNER = 'user_id:%(user_id)s' RULE_ADMIN_OR_OWNER = 'rule:admin_or_owner' RULE_ADMIN_OR_CREDENTIAL_OWNER = ( 'rule:admin_required or ' - '(rule:owner and user_id:%(target.credential.user_id)s)') + '(rule:owner and 
user_id:%(target.credential.user_id)s)' +) RULE_ADMIN_OR_TARGET_DOMAIN = ( - 'rule:admin_required or ' - 'token.project.domain.id:%(target.domain.id)s') + 'rule:admin_required or ' 'token.project.domain.id:%(target.domain.id)s' +) RULE_ADMIN_OR_TARGET_PROJECT = ( - 'rule:admin_required or ' - 'project_id:%(target.project.id)s') + 'rule:admin_required or ' 'project_id:%(target.project.id)s' +) RULE_ADMIN_OR_TOKEN_SUBJECT = 'rule:admin_or_token_subject' # nosec RULE_REVOKE_EVENT_OR_ADMIN = 'rule:revoke_event_or_admin' RULE_SERVICE_ADMIN_OR_TOKEN_SUBJECT = ( - 'rule:service_admin_or_token_subject') # nosec + 'rule:service_admin_or_token_subject' # nosec +) RULE_SERVICE_OR_ADMIN = 'rule:service_or_admin' RULE_TRUST_OWNER = 'user_id:%(trust.trustor_user_id)s' @@ -49,8 +51,7 @@ SYSTEM_ADMIN = 'role:admin and system_scope:all' DOMAIN_READER = 'role:reader and domain_id:%(target.domain_id)s' RULE_SYSTEM_ADMIN_OR_OWNER = '(' + SYSTEM_ADMIN + ') or rule:owner' ADMIN_OR_SYSTEM_READER_OR_OWNER = ( - '(' + RULE_ADMIN_REQUIRED + ') or ' - '(' + SYSTEM_READER + ') or rule:owner' + '(' + RULE_ADMIN_REQUIRED + ') or ' '(' + SYSTEM_READER + ') or rule:owner' ) RULE_ADMIN_OR_SYSTEM_READER = 'rule:admin_required or (' + SYSTEM_READER + ')' @@ -61,35 +62,33 @@ ADMIN_OR_SYSTEM_READER_OR_CRED_OWNER = ( 'or user_id:%(target.credential.user_id)s' ) ADMIN_OR_CRED_OWNER = ( - '(' + RULE_ADMIN_REQUIRED + ') ' - 'or user_id:%(target.credential.user_id)s' + '(' + RULE_ADMIN_REQUIRED + ') ' 'or user_id:%(target.credential.user_id)s' ) rules = [ policy.RuleDefault( - name='admin_required', - check_str='role:admin or is_admin:1'), - policy.RuleDefault( - name='service_role', - check_str='role:service'), + name='admin_required', check_str='role:admin or is_admin:1' + ), + policy.RuleDefault(name='service_role', check_str='role:service'), policy.RuleDefault( name='service_or_admin', - check_str='rule:admin_required or rule:service_role'), + check_str='rule:admin_required or rule:service_role', + ), 
+ policy.RuleDefault(name='owner', check_str=RULE_OWNER), policy.RuleDefault( - name='owner', - check_str=RULE_OWNER), + name='admin_or_owner', check_str='rule:admin_required or rule:owner' + ), policy.RuleDefault( - name='admin_or_owner', - check_str='rule:admin_required or rule:owner'), - policy.RuleDefault( - name='token_subject', - check_str='user_id:%(target.token.user_id)s'), + name='token_subject', check_str='user_id:%(target.token.user_id)s' + ), policy.RuleDefault( name='admin_or_token_subject', - check_str='rule:admin_required or rule:token_subject'), + check_str='rule:admin_required or rule:token_subject', + ), policy.RuleDefault( name='service_admin_or_token_subject', - check_str='rule:service_or_admin or rule:token_subject'), + check_str='rule:service_or_admin or rule:token_subject', + ), ] diff --git a/keystone/common/policies/consumer.py b/keystone/common/policies/consumer.py index 650660a6f0..ecebf98c8d 100644 --- a/keystone/common/policies/consumer.py +++ b/keystone/common/policies/consumer.py @@ -23,31 +23,31 @@ deprecated_get_consumer = policy.DeprecatedRule( name=base.IDENTITY % 'get_consumer', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_consumers = policy.DeprecatedRule( name=base.IDENTITY % 'list_consumers', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_consumer = policy.DeprecatedRule( name=base.IDENTITY % 'create_consumer', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_update_consumer = policy.DeprecatedRule( name=base.IDENTITY % 'update_consumer', check_str=base.RULE_ADMIN_REQUIRED, 
deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_consumer = policy.DeprecatedRule( name=base.IDENTITY % 'delete_consumer', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -57,41 +57,53 @@ consumer_policies = [ check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Show OAUTH1 consumer details.', - operations=[{'path': '/v3/OS-OAUTH1/consumers/{consumer_id}', - 'method': 'GET'}], - deprecated_rule=deprecated_get_consumer), + operations=[ + {'path': '/v3/OS-OAUTH1/consumers/{consumer_id}', 'method': 'GET'} + ], + deprecated_rule=deprecated_get_consumer, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_consumers', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List OAUTH1 consumers.', - operations=[{'path': '/v3/OS-OAUTH1/consumers', - 'method': 'GET'}], - deprecated_rule=deprecated_list_consumers), + operations=[{'path': '/v3/OS-OAUTH1/consumers', 'method': 'GET'}], + deprecated_rule=deprecated_list_consumers, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_consumer', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create OAUTH1 consumer.', - operations=[{'path': '/v3/OS-OAUTH1/consumers', - 'method': 'POST'}], - deprecated_rule=deprecated_create_consumer), + operations=[{'path': '/v3/OS-OAUTH1/consumers', 'method': 'POST'}], + deprecated_rule=deprecated_create_consumer, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_consumer', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update OAUTH1 consumer.', - operations=[{'path': '/v3/OS-OAUTH1/consumers/{consumer_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_consumer), + 
operations=[ + { + 'path': '/v3/OS-OAUTH1/consumers/{consumer_id}', + 'method': 'PATCH', + } + ], + deprecated_rule=deprecated_update_consumer, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_consumer', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete OAUTH1 consumer.', - operations=[{'path': '/v3/OS-OAUTH1/consumers/{consumer_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_consumer), + operations=[ + { + 'path': '/v3/OS-OAUTH1/consumers/{consumer_id}', + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_consumer, + ), ] diff --git a/keystone/common/policies/credential.py b/keystone/common/policies/credential.py index 84a62d7ea3..77532ce625 100644 --- a/keystone/common/policies/credential.py +++ b/keystone/common/policies/credential.py @@ -23,31 +23,31 @@ deprecated_get_credential = policy.DeprecatedRule( name=base.IDENTITY % 'get_credential', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_credentials = policy.DeprecatedRule( name=base.IDENTITY % 'list_credentials', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_credential = policy.DeprecatedRule( name=base.IDENTITY % 'create_credential', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_credential = policy.DeprecatedRule( name=base.IDENTITY % 'update_credential', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_credential = policy.DeprecatedRule( 
name=base.IDENTITY % 'delete_credential', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -57,8 +57,9 @@ credential_policies = [ check_str=base.ADMIN_OR_SYSTEM_READER_OR_CRED_OWNER, scope_types=['system', 'domain', 'project'], description='Show credentials details.', - operations=[{'path': '/v3/credentials/{credential_id}', - 'method': 'GET'}], + operations=[ + {'path': '/v3/credentials/{credential_id}', 'method': 'GET'} + ], deprecated_rule=deprecated_get_credential, ), policy.DocumentedRuleDefault( @@ -66,8 +67,7 @@ credential_policies = [ check_str=base.ADMIN_OR_SYSTEM_READER_OR_CRED_OWNER, scope_types=['system', 'domain', 'project'], description='List credentials.', - operations=[{'path': '/v3/credentials', - 'method': 'GET'}], + operations=[{'path': '/v3/credentials', 'method': 'GET'}], deprecated_rule=deprecated_list_credentials, ), policy.DocumentedRuleDefault( @@ -75,8 +75,7 @@ credential_policies = [ check_str=base.ADMIN_OR_CRED_OWNER, scope_types=['system', 'domain', 'project'], description='Create credential.', - operations=[{'path': '/v3/credentials', - 'method': 'POST'}], + operations=[{'path': '/v3/credentials', 'method': 'POST'}], deprecated_rule=deprecated_create_credential, ), policy.DocumentedRuleDefault( @@ -84,8 +83,9 @@ credential_policies = [ check_str=base.ADMIN_OR_CRED_OWNER, scope_types=['system', 'domain', 'project'], description='Update credential.', - operations=[{'path': '/v3/credentials/{credential_id}', - 'method': 'PATCH'}], + operations=[ + {'path': '/v3/credentials/{credential_id}', 'method': 'PATCH'} + ], deprecated_rule=deprecated_update_credential, ), policy.DocumentedRuleDefault( @@ -93,10 +93,11 @@ credential_policies = [ check_str=base.ADMIN_OR_CRED_OWNER, scope_types=['system', 'domain', 'project'], description='Delete credential.', - operations=[{'path': '/v3/credentials/{credential_id}', - 
'method': 'DELETE'}], + operations=[ + {'path': '/v3/credentials/{credential_id}', 'method': 'DELETE'} + ], deprecated_rule=deprecated_delete_credential, - ) + ), ] diff --git a/keystone/common/policies/domain.py b/keystone/common/policies/domain.py index f2be68d11d..75b5c1a4cb 100644 --- a/keystone/common/policies/domain.py +++ b/keystone/common/policies/domain.py @@ -23,31 +23,31 @@ deprecated_list_domains = policy.DeprecatedRule( name=base.IDENTITY % 'list_domains', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_get_domain = policy.DeprecatedRule( name=base.IDENTITY % 'get_domain', check_str=base.RULE_ADMIN_OR_TARGET_DOMAIN, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_domain = policy.DeprecatedRule( name=base.IDENTITY % 'update_domain', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_domain = policy.DeprecatedRule( name=base.IDENTITY % 'create_domain', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_domain = policy.DeprecatedRule( name=base.IDENTITY % 'delete_domain', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) ADMIN_OR_SYSTEM_USER_OR_DOMAIN_USER_OR_PROJECT_USER = ( base.RULE_ADMIN_REQUIRED + ' or ' @@ -69,41 +69,41 @@ domain_policies = [ check_str=ADMIN_OR_SYSTEM_USER_OR_DOMAIN_USER_OR_PROJECT_USER, scope_types=['system', 'domain', 'project'], description='Show domain details.', - operations=[{'path': 
'/v3/domains/{domain_id}', - 'method': 'GET'}], - deprecated_rule=deprecated_get_domain), + operations=[{'path': '/v3/domains/{domain_id}', 'method': 'GET'}], + deprecated_rule=deprecated_get_domain, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_domains', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], description='List domains.', - operations=[{'path': '/v3/domains', - 'method': 'GET'}], - deprecated_rule=deprecated_list_domains), + operations=[{'path': '/v3/domains', 'method': 'GET'}], + deprecated_rule=deprecated_list_domains, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_domain', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create domain.', - operations=[{'path': '/v3/domains', - 'method': 'POST'}], - deprecated_rule=deprecated_create_domain), + operations=[{'path': '/v3/domains', 'method': 'POST'}], + deprecated_rule=deprecated_create_domain, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_domain', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update domain.', - operations=[{'path': '/v3/domains/{domain_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_domain), + operations=[{'path': '/v3/domains/{domain_id}', 'method': 'PATCH'}], + deprecated_rule=deprecated_update_domain, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_domain', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete domain.', - operations=[{'path': '/v3/domains/{domain_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_domain), + operations=[{'path': '/v3/domains/{domain_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_domain, + ), ] diff --git a/keystone/common/policies/domain_config.py b/keystone/common/policies/domain_config.py index 4fda085e2a..7fa3dda06d 100644 --- a/keystone/common/policies/domain_config.py +++ 
b/keystone/common/policies/domain_config.py @@ -23,35 +23,35 @@ deprecated_get_domain_config = policy.DeprecatedRule( name=base.IDENTITY % 'get_domain_config', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_get_domain_config_default = policy.DeprecatedRule( name=base.IDENTITY % 'get_domain_config_default', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_domain_config = policy.DeprecatedRule( name=base.IDENTITY % 'create_domain_config', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_update_domain_config = policy.DeprecatedRule( name=base.IDENTITY % 'update_domain_config', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_domain_config = policy.DeprecatedRule( name=base.IDENTITY % 'delete_domain_config', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -62,45 +62,38 @@ domain_config_policies = [ scope_types=['system', 'project'], description='Create domain configuration.', operations=[ - { - 'path': '/v3/domains/{domain_id}/config', - 'method': 'PUT' - } + {'path': '/v3/domains/{domain_id}/config', 'method': 'PUT'} ], - deprecated_rule=deprecated_create_domain_config + deprecated_rule=deprecated_create_domain_config, ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_domain_config', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], - description=('Get 
the entire domain configuration for a domain, an ' - 'option group within a domain, or a specific ' - 'configuration option within a group for a domain.'), + description=( + 'Get the entire domain configuration for a domain, an ' + 'option group within a domain, or a specific ' + 'configuration option within a group for a domain.' + ), operations=[ + {'path': '/v3/domains/{domain_id}/config', 'method': 'GET'}, + {'path': '/v3/domains/{domain_id}/config', 'method': 'HEAD'}, { - 'path': '/v3/domains/{domain_id}/config', - 'method': 'GET' - }, - { - 'path': '/v3/domains/{domain_id}/config', - 'method': 'HEAD' + 'path': '/v3/domains/{domain_id}/config/{group}', + 'method': 'GET', }, { 'path': '/v3/domains/{domain_id}/config/{group}', - 'method': 'GET' - }, - { - 'path': '/v3/domains/{domain_id}/config/{group}', - 'method': 'HEAD' + 'method': 'HEAD', }, { 'path': '/v3/domains/{domain_id}/config/{group}/{option}', - 'method': 'GET' + 'method': 'GET', }, { 'path': '/v3/domains/{domain_id}/config/{group}/{option}', - 'method': 'HEAD' - } + 'method': 'HEAD', + }, ], deprecated_rule=deprecated_get_domain_config, ), @@ -110,48 +103,53 @@ domain_config_policies = [ # This should be accessible to anyone with a valid token, regardless of # system-scope or project-scope. scope_types=['system', 'domain', 'project'], - description=('Get security compliance domain configuration for ' - 'either a domain or a specific option in a domain.'), + description=( + 'Get security compliance domain configuration for ' + 'either a domain or a specific option in a domain.' 
+ ), operations=[ { 'path': '/v3/domains/{domain_id}/config/security_compliance', - 'method': 'GET' + 'method': 'GET', }, { 'path': '/v3/domains/{domain_id}/config/security_compliance', - 'method': 'HEAD' + 'method': 'HEAD', }, { - 'path': ('/v3/domains/{domain_id}/config/' - 'security_compliance/{option}'), - 'method': 'GET' + 'path': ( + '/v3/domains/{domain_id}/config/' + 'security_compliance/{option}' + ), + 'method': 'GET', }, { - 'path': ('/v3/domains/{domain_id}/config/' - 'security_compliance/{option}'), - 'method': 'HEAD' - } + 'path': ( + '/v3/domains/{domain_id}/config/' + 'security_compliance/{option}' + ), + 'method': 'HEAD', + }, ], ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_domain_config', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], - description=('Update domain configuration for either a domain, ' - 'specific group or a specific option in a group.'), + description=( + 'Update domain configuration for either a domain, ' + 'specific group or a specific option in a group.' + ), operations=[ - { - 'path': '/v3/domains/{domain_id}/config', - 'method': 'PATCH' - }, + {'path': '/v3/domains/{domain_id}/config', 'method': 'PATCH'}, { 'path': '/v3/domains/{domain_id}/config/{group}', - 'method': 'PATCH' + 'method': 'PATCH', }, { 'path': '/v3/domains/{domain_id}/config/{group}/{option}', - 'method': 'PATCH' - } + 'method': 'PATCH', + }, ], deprecated_rule=deprecated_update_domain_config, ), @@ -159,21 +157,20 @@ domain_config_policies = [ name=base.IDENTITY % 'delete_domain_config', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], - description=('Delete domain configuration for either a domain, ' - 'specific group or a specific option in a group.'), + description=( + 'Delete domain configuration for either a domain, ' + 'specific group or a specific option in a group.' 
+ ), operations=[ - { - 'path': '/v3/domains/{domain_id}/config', - 'method': 'DELETE' - }, + {'path': '/v3/domains/{domain_id}/config', 'method': 'DELETE'}, { 'path': '/v3/domains/{domain_id}/config/{group}', - 'method': 'DELETE' + 'method': 'DELETE', }, { 'path': '/v3/domains/{domain_id}/config/{group}/{option}', - 'method': 'DELETE' - } + 'method': 'DELETE', + }, ], deprecated_rule=deprecated_delete_domain_config, ), @@ -181,36 +178,26 @@ domain_config_policies = [ name=base.IDENTITY % 'get_domain_config_default', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], - description=('Get domain configuration default for either a domain, ' - 'specific group or a specific option in a group.'), + description=( + 'Get domain configuration default for either a domain, ' + 'specific group or a specific option in a group.' + ), operations=[ + {'path': '/v3/domains/config/default', 'method': 'GET'}, + {'path': '/v3/domains/config/default', 'method': 'HEAD'}, + {'path': '/v3/domains/config/{group}/default', 'method': 'GET'}, + {'path': '/v3/domains/config/{group}/default', 'method': 'HEAD'}, { - 'path': '/v3/domains/config/default', - 'method': 'GET' - }, - { - 'path': '/v3/domains/config/default', - 'method': 'HEAD' - }, - { - 'path': '/v3/domains/config/{group}/default', - 'method': 'GET' - }, - { - 'path': '/v3/domains/config/{group}/default', - 'method': 'HEAD' + 'path': '/v3/domains/config/{group}/{option}/default', + 'method': 'GET', }, { 'path': '/v3/domains/config/{group}/{option}/default', - 'method': 'GET' + 'method': 'HEAD', }, - { - 'path': '/v3/domains/config/{group}/{option}/default', - 'method': 'HEAD' - } ], deprecated_rule=deprecated_get_domain_config_default, - ) + ), ] diff --git a/keystone/common/policies/ec2_credential.py b/keystone/common/policies/ec2_credential.py index 0a876587ce..9acf77a6a9 100644 --- a/keystone/common/policies/ec2_credential.py +++ b/keystone/common/policies/ec2_credential.py @@ -23,25 +23,25 @@ 
deprecated_ec2_get_credential = policy.DeprecatedRule( name=base.IDENTITY % 'ec2_get_credential', check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_ec2_list_credentials = policy.DeprecatedRule( name=base.IDENTITY % 'ec2_list_credentials', check_str=base.RULE_ADMIN_OR_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_ec2_create_credential = policy.DeprecatedRule( name=base.IDENTITY % 'ec2_create_credential', check_str=base.RULE_ADMIN_OR_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_ec2_delete_credential = policy.DeprecatedRule( name=base.IDENTITY % 'ec2_delete_credential', check_str=base.RULE_ADMIN_OR_CREDENTIAL_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -51,18 +51,24 @@ ec2_credential_policies = [ check_str=base.ADMIN_OR_SYSTEM_READER_OR_CRED_OWNER, scope_types=['system', 'project'], description='Show ec2 credential details.', - operations=[{'path': ('/v3/users/{user_id}/credentials/OS-EC2/' - '{credential_id}'), - 'method': 'GET'}], - deprecated_rule=deprecated_ec2_get_credential + operations=[ + { + 'path': ( + '/v3/users/{user_id}/credentials/OS-EC2/' '{credential_id}' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_ec2_get_credential, ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'ec2_list_credentials', check_str=base.ADMIN_OR_SYSTEM_READER_OR_OWNER, scope_types=['system', 'project'], description='List ec2 credentials.', - operations=[{'path': '/v3/users/{user_id}/credentials/OS-EC2', - 'method': 'GET'}], + operations=[ + {'path': '/v3/users/{user_id}/credentials/OS-EC2', 'method': 
'GET'} + ], deprecated_rule=deprecated_ec2_list_credentials, ), policy.DocumentedRuleDefault( @@ -70,8 +76,12 @@ ec2_credential_policies = [ check_str=base.RULE_ADMIN_OR_OWNER, scope_types=['system', 'project'], description='Create ec2 credential.', - operations=[{'path': '/v3/users/{user_id}/credentials/OS-EC2', - 'method': 'POST'}], + operations=[ + { + 'path': '/v3/users/{user_id}/credentials/OS-EC2', + 'method': 'POST', + } + ], deprecated_rule=deprecated_ec2_create_credential, ), policy.DocumentedRuleDefault( @@ -79,11 +89,16 @@ ec2_credential_policies = [ check_str=base.ADMIN_OR_CRED_OWNER, scope_types=['system', 'project'], description='Delete ec2 credential.', - operations=[{'path': ('/v3/users/{user_id}/credentials/OS-EC2/' - '{credential_id}'), - 'method': 'DELETE'}], + operations=[ + { + 'path': ( + '/v3/users/{user_id}/credentials/OS-EC2/' '{credential_id}' + ), + 'method': 'DELETE', + } + ], deprecated_rule=deprecated_ec2_delete_credential, - ) + ), ] diff --git a/keystone/common/policies/endpoint.py b/keystone/common/policies/endpoint.py index 2a0390827d..fa4f0386d2 100644 --- a/keystone/common/policies/endpoint.py +++ b/keystone/common/policies/endpoint.py @@ -20,29 +20,34 @@ DEPRECATED_REASON = ( ) deprecated_get_endpoint = policy.DeprecatedRule( - name=base.IDENTITY % 'get_endpoint', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'get_endpoint', + check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_endpoints = policy.DeprecatedRule( - name=base.IDENTITY % 'list_endpoints', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'list_endpoints', + check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_endpoint = policy.DeprecatedRule( - name=base.IDENTITY % 
'update_endpoint', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'update_endpoint', + check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_endpoint = policy.DeprecatedRule( - name=base.IDENTITY % 'create_endpoint', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'create_endpoint', + check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_endpoint = policy.DeprecatedRule( - name=base.IDENTITY % 'delete_endpoint', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'delete_endpoint', + check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -52,41 +57,45 @@ endpoint_policies = [ check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Show endpoint details.', - operations=[{'path': '/v3/endpoints/{endpoint_id}', - 'method': 'GET'}], - deprecated_rule=deprecated_get_endpoint), + operations=[{'path': '/v3/endpoints/{endpoint_id}', 'method': 'GET'}], + deprecated_rule=deprecated_get_endpoint, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_endpoints', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List endpoints.', - operations=[{'path': '/v3/endpoints', - 'method': 'GET'}], - deprecated_rule=deprecated_list_endpoints), + operations=[{'path': '/v3/endpoints', 'method': 'GET'}], + deprecated_rule=deprecated_list_endpoints, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_endpoint', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create endpoint.', - operations=[{'path': '/v3/endpoints', - 'method': 'POST'}], - 
deprecated_rule=deprecated_create_endpoint), + operations=[{'path': '/v3/endpoints', 'method': 'POST'}], + deprecated_rule=deprecated_create_endpoint, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_endpoint', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update endpoint.', - operations=[{'path': '/v3/endpoints/{endpoint_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_endpoint), + operations=[ + {'path': '/v3/endpoints/{endpoint_id}', 'method': 'PATCH'} + ], + deprecated_rule=deprecated_update_endpoint, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_endpoint', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete endpoint.', - operations=[{'path': '/v3/endpoints/{endpoint_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_endpoint) + operations=[ + {'path': '/v3/endpoints/{endpoint_id}', 'method': 'DELETE'} + ], + deprecated_rule=deprecated_delete_endpoint, + ), ] diff --git a/keystone/common/policies/endpoint_group.py b/keystone/common/policies/endpoint_group.py index e7d6745558..06f0e41eb2 100644 --- a/keystone/common/policies/endpoint_group.py +++ b/keystone/common/policies/endpoint_group.py @@ -23,77 +23,77 @@ deprecated_list_endpoint_groups = policy.DeprecatedRule( name=base.IDENTITY % 'list_endpoint_groups', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_get_endpoint_group = policy.DeprecatedRule( name=base.IDENTITY % 'get_endpoint_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_projects_assoc_with_endpoint_group = policy.DeprecatedRule( name=base.IDENTITY % 'list_projects_associated_with_endpoint_group', 
check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_endpoints_assoc_with_endpoint_group = policy.DeprecatedRule( name=base.IDENTITY % 'list_endpoints_associated_with_endpoint_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_get_endpoint_group_in_project = policy.DeprecatedRule( name=base.IDENTITY % 'get_endpoint_group_in_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_endpoint_groups_for_project = policy.DeprecatedRule( name=base.IDENTITY % 'list_endpoint_groups_for_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_endpoint_group = policy.DeprecatedRule( name=base.IDENTITY % 'create_endpoint_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_update_endpoint_group = policy.DeprecatedRule( name=base.IDENTITY % 'update_endpoint_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_endpoint_group = policy.DeprecatedRule( name=base.IDENTITY % 'delete_endpoint_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_add_endpoint_group_to_project = policy.DeprecatedRule( 
name=base.IDENTITY % 'add_endpoint_group_to_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_remove_endpoint_group_from_project = policy.DeprecatedRule( name=base.IDENTITY % 'remove_endpoint_group_from_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -103,106 +103,178 @@ group_endpoint_policies = [ check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create endpoint group.', - operations=[{'path': '/v3/OS-EP-FILTER/endpoint_groups', - 'method': 'POST'}], - deprecated_rule=deprecated_create_endpoint_group), + operations=[ + {'path': '/v3/OS-EP-FILTER/endpoint_groups', 'method': 'POST'} + ], + deprecated_rule=deprecated_create_endpoint_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_endpoint_groups', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List endpoint groups.', - operations=[{'path': '/v3/OS-EP-FILTER/endpoint_groups', - 'method': 'GET'}], - deprecated_rule=deprecated_list_endpoint_groups), + operations=[ + {'path': '/v3/OS-EP-FILTER/endpoint_groups', 'method': 'GET'} + ], + deprecated_rule=deprecated_list_endpoint_groups, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_endpoint_group', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Get endpoint group.', - operations=[{'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}'), - 'method': 'GET'}, - {'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}'), - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_endpoint_group), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}' + ), + 'method': 'GET', + }, + { + 
'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}' + ), + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_get_endpoint_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_endpoint_group', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update endpoint group.', - operations=[{'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}'), - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_endpoint_group), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}' + ), + 'method': 'PATCH', + } + ], + deprecated_rule=deprecated_update_endpoint_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_endpoint_group', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete endpoint group.', - operations=[{'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}'), - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_endpoint_group), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}' + ), + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_endpoint_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_projects_associated_with_endpoint_group', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], - description=('List all projects associated with a specific endpoint ' - 'group.'), - operations=[{'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/projects'), - 'method': 'GET'}], - deprecated_rule=deprecated_list_projects_assoc_with_endpoint_group), + description=( + 'List all projects associated with a specific endpoint ' 'group.' 
+ ), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' + '{endpoint_group_id}/projects' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_list_projects_assoc_with_endpoint_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_endpoints_associated_with_endpoint_group', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List all endpoints associated with an endpoint group.', - operations=[{'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/endpoints'), - 'method': 'GET'}], - deprecated_rule=deprecated_list_endpoints_assoc_with_endpoint_group), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' + '{endpoint_group_id}/endpoints' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_list_endpoints_assoc_with_endpoint_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_endpoint_group_in_project', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], - description=('Check if an endpoint group is associated with a ' - 'project.'), - operations=[{'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/projects/{project_id}'), - 'method': 'GET'}, - {'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/projects/{project_id}'), - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_endpoint_group_in_project), + description=( + 'Check if an endpoint group is associated with a ' 'project.' 
+ ), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' + '{endpoint_group_id}/projects/{project_id}' + ), + 'method': 'GET', + }, + { + 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' + '{endpoint_group_id}/projects/{project_id}' + ), + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_get_endpoint_group_in_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_endpoint_groups_for_project', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List endpoint groups associated with a specific project.', - operations=[{'path': ('/v3/OS-EP-FILTER/projects/{project_id}/' - 'endpoint_groups'), - 'method': 'GET'}], - deprecated_rule=deprecated_list_endpoint_groups_for_project), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/projects/{project_id}/' 'endpoint_groups' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_list_endpoint_groups_for_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'add_endpoint_group_to_project', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Allow a project to access an endpoint group.', - operations=[{'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/projects/{project_id}'), - 'method': 'PUT'}], - deprecated_rule=deprecated_add_endpoint_group_to_project), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' + '{endpoint_group_id}/projects/{project_id}' + ), + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_add_endpoint_group_to_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'remove_endpoint_group_from_project', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Remove endpoint group from project.', - operations=[{'path': ('/v3/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/projects/{project_id}'), - 'method': 'DELETE'}], - deprecated_rule=deprecated_remove_endpoint_group_from_project) + operations=[ + { 
+ 'path': ( + '/v3/OS-EP-FILTER/endpoint_groups/' + '{endpoint_group_id}/projects/{project_id}' + ), + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_remove_endpoint_group_from_project, + ), ] diff --git a/keystone/common/policies/grant.py b/keystone/common/policies/grant.py index d47f61d6ff..45dfb78a43 100644 --- a/keystone/common/policies/grant.py +++ b/keystone/common/policies/grant.py @@ -85,69 +85,73 @@ deprecated_check_system_grant_for_user = policy.DeprecatedRule( name=base.IDENTITY % 'check_system_grant_for_user', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_system_grants_for_user = policy.DeprecatedRule( name=base.IDENTITY % 'list_system_grants_for_user', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_system_grant_for_user = policy.DeprecatedRule( name=base.IDENTITY % 'create_system_grant_for_user', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_revoke_system_grant_for_user = policy.DeprecatedRule( name=base.IDENTITY % 'revoke_system_grant_for_user', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_check_system_grant_for_group = policy.DeprecatedRule( name=base.IDENTITY % 'check_system_grant_for_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_system_grants_for_group = policy.DeprecatedRule( name=base.IDENTITY % 'list_system_grants_for_group', 
check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_system_grant_for_group = policy.DeprecatedRule( name=base.IDENTITY % 'create_system_grant_for_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_revoke_system_grant_for_group = policy.DeprecatedRule( name=base.IDENTITY % 'revoke_system_grant_for_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_grants = policy.DeprecatedRule( - name=base.IDENTITY % 'list_grants', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'list_grants', + check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_check_grant = policy.DeprecatedRule( - name=base.IDENTITY % 'check_grant', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'check_grant', + check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_grant = policy.DeprecatedRule( - name=base.IDENTITY % 'create_grant', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'create_grant', + check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_revoke_grant = policy.DeprecatedRule( - name=base.IDENTITY % 'revoke_grant', check_str=base.RULE_ADMIN_REQUIRED, + name=base.IDENTITY % 'revoke_grant', + check_str=base.RULE_ADMIN_REQUIRED, 
deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -159,36 +163,44 @@ resource_paths = [ ] -resource_paths += ['/OS-INHERIT' + path + '/inherited_to_projects' - for path in resource_paths] +resource_paths += [ + '/OS-INHERIT' + path + '/inherited_to_projects' for path in resource_paths +] collection_paths = [ '/projects/{project_id}/users/{user_id}/roles', '/projects/{project_id}/groups/{group_id}/roles', '/domains/{domain_id}/users/{user_id}/roles', - '/domains/{domain_id}/groups/{group_id}/roles' + '/domains/{domain_id}/groups/{group_id}/roles', ] inherited_collection_paths = [ - ('/OS-INHERIT/domains/{domain_id}/groups/{group_id}/roles/' - 'inherited_to_projects'), - ('/OS-INHERIT/domains/{domain_id}/users/{user_id}/roles/' - 'inherited_to_projects') + ( + '/OS-INHERIT/domains/{domain_id}/groups/{group_id}/roles/' + 'inherited_to_projects' + ), + ( + '/OS-INHERIT/domains/{domain_id}/users/{user_id}/roles/' + 'inherited_to_projects' + ), ] def list_operations(paths, methods): - return [{'path': '/v3' + path, 'method': method} - for path in paths for method in methods] + return [ + {'path': '/v3' + path, 'method': method} + for path in paths + for method in methods + ] # NOTE(samueldmq): Unlike individual resource paths, collection # paths for the inherited grants do not contain a HEAD API -list_grants_operations = ( - list_operations(collection_paths, ['GET', 'HEAD']) + - list_operations(inherited_collection_paths, ['GET'])) +list_grants_operations = list_operations( + collection_paths, ['GET', 'HEAD'] +) + list_operations(inherited_collection_paths, ['GET']) grant_policies = [ @@ -196,52 +208,64 @@ grant_policies = [ name=base.IDENTITY % 'check_grant', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], - description=('Check a role grant between a target and an actor. A ' - 'target can be either a domain or a project. 
An actor ' - 'can be either a user or a group. These terms also apply ' - 'to the OS-INHERIT APIs, where grants on the target ' - 'are inherited to all projects in the subtree, if ' - 'applicable.'), + description=( + 'Check a role grant between a target and an actor. A ' + 'target can be either a domain or a project. An actor ' + 'can be either a user or a group. These terms also apply ' + 'to the OS-INHERIT APIs, where grants on the target ' + 'are inherited to all projects in the subtree, if ' + 'applicable.' + ), operations=list_operations(resource_paths, ['HEAD', 'GET']), - deprecated_rule=deprecated_check_grant), + deprecated_rule=deprecated_check_grant, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_grants', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_LIST, scope_types=['system', 'domain', 'project'], - description=('List roles granted to an actor on a target. A target ' - 'can be either a domain or a project. An actor can be ' - 'either a user or a group. For the OS-INHERIT APIs, it ' - 'is possible to list inherited role grants for actors on ' - 'domains, where grants are inherited to all projects ' - 'in the specified domain.'), + description=( + 'List roles granted to an actor on a target. A target ' + 'can be either a domain or a project. An actor can be ' + 'either a user or a group. For the OS-INHERIT APIs, it ' + 'is possible to list inherited role grants for actors on ' + 'domains, where grants are inherited to all projects ' + 'in the specified domain.' + ), operations=list_grants_operations, - deprecated_rule=deprecated_list_grants), + deprecated_rule=deprecated_list_grants, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_grant', check_str=ADMIN_OR_DOMAIN_ADMIN, scope_types=['system', 'domain', 'project'], - description=('Create a role grant between a target and an actor. A ' - 'target can be either a domain or a project. An actor ' - 'can be either a user or a group. 
These terms also apply ' - 'to the OS-INHERIT APIs, where grants on the target ' - 'are inherited to all projects in the subtree, if ' - 'applicable.'), + description=( + 'Create a role grant between a target and an actor. A ' + 'target can be either a domain or a project. An actor ' + 'can be either a user or a group. These terms also apply ' + 'to the OS-INHERIT APIs, where grants on the target ' + 'are inherited to all projects in the subtree, if ' + 'applicable.' + ), operations=list_operations(resource_paths, ['PUT']), - deprecated_rule=deprecated_create_grant), + deprecated_rule=deprecated_create_grant, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'revoke_grant', check_str=ADMIN_OR_DOMAIN_ADMIN, scope_types=['system', 'domain', 'project'], - description=('Revoke a role grant between a target and an actor. A ' - 'target can be either a domain or a project. An actor ' - 'can be either a user or a group. These terms also apply ' - 'to the OS-INHERIT APIs, where grants on the target ' - 'are inherited to all projects in the subtree, if ' - 'applicable. In that case, revoking the role grant in ' - 'the target would remove the logical effect of ' - 'inheriting it to the target\'s projects subtree.'), + description=( + 'Revoke a role grant between a target and an actor. A ' + 'target can be either a domain or a project. An actor ' + 'can be either a user or a group. These terms also apply ' + 'to the OS-INHERIT APIs, where grants on the target ' + 'are inherited to all projects in the subtree, if ' + 'applicable. In that case, revoking the role grant in ' + 'the target would remove the logical effect of ' + 'inheriting it to the target\'s projects subtree.' 
+ ), operations=list_operations(resource_paths, ['DELETE']), - deprecated_rule=deprecated_revoke_grant), + deprecated_rule=deprecated_revoke_grant, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_system_grants_for_user', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, @@ -250,7 +274,7 @@ grant_policies = [ operations=[ { 'path': '/v3/system/users/{user_id}/roles', - 'method': ['HEAD', 'GET'] + 'method': ['HEAD', 'GET'], } ], deprecated_rule=deprecated_list_system_grants_for_user, @@ -263,7 +287,7 @@ grant_policies = [ operations=[ { 'path': '/v3/system/users/{user_id}/roles/{role_id}', - 'method': ['HEAD', 'GET'] + 'method': ['HEAD', 'GET'], } ], deprecated_rule=deprecated_check_system_grant_for_user, @@ -276,7 +300,7 @@ grant_policies = [ operations=[ { 'path': '/v3/system/users/{user_id}/roles/{role_id}', - 'method': ['PUT'] + 'method': ['PUT'], } ], deprecated_rule=deprecated_create_system_grant_for_user, @@ -289,7 +313,7 @@ grant_policies = [ operations=[ { 'path': '/v3/system/users/{user_id}/roles/{role_id}', - 'method': ['DELETE'] + 'method': ['DELETE'], } ], deprecated_rule=deprecated_revoke_system_grant_for_user, @@ -302,7 +326,7 @@ grant_policies = [ operations=[ { 'path': '/v3/system/groups/{group_id}/roles', - 'method': ['HEAD', 'GET'] + 'method': ['HEAD', 'GET'], } ], deprecated_rule=deprecated_list_system_grants_for_group, @@ -315,7 +339,7 @@ grant_policies = [ operations=[ { 'path': '/v3/system/groups/{group_id}/roles/{role_id}', - 'method': ['HEAD', 'GET'] + 'method': ['HEAD', 'GET'], } ], deprecated_rule=deprecated_check_system_grant_for_group, @@ -328,7 +352,7 @@ grant_policies = [ operations=[ { 'path': '/v3/system/groups/{group_id}/roles/{role_id}', - 'method': ['PUT'] + 'method': ['PUT'], } ], deprecated_rule=deprecated_create_system_grant_for_group, @@ -341,11 +365,11 @@ grant_policies = [ operations=[ { 'path': '/v3/system/groups/{group_id}/roles/{role_id}', - 'method': ['DELETE'] + 'method': ['DELETE'], } ], 
deprecated_rule=deprecated_revoke_system_grant_for_group, - ) + ), ] diff --git a/keystone/common/policies/group.py b/keystone/common/policies/group.py index 8c8293cd39..8613524109 100644 --- a/keystone/common/policies/group.py +++ b/keystone/common/policies/group.py @@ -21,8 +21,10 @@ SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER = ( 'user_id:%(user_id)s' ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_OR_OWNER = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER + '(' + + base.RULE_ADMIN_REQUIRED + + ') or ' + + SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER ) SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER = ( @@ -32,8 +34,10 @@ SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER = ( 'domain_id:%(target.user.domain_id)s)' ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER + '(' + + base.RULE_ADMIN_REQUIRED + + ') or ' + + SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER ) SYSTEM_READER_OR_DOMAIN_READER = ( @@ -41,8 +45,7 @@ SYSTEM_READER_OR_DOMAIN_READER = ( '(role:reader and domain_id:%(target.group.domain_id)s)' ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER + '(' + base.RULE_ADMIN_REQUIRED + ') or ' + SYSTEM_READER_OR_DOMAIN_READER ) SYSTEM_ADMIN_OR_DOMAIN_ADMIN = ( @@ -58,61 +61,61 @@ deprecated_get_group = policy.DeprecatedRule( name=base.IDENTITY % 'get_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_groups = policy.DeprecatedRule( name=base.IDENTITY % 'list_groups', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) 
deprecated_list_groups_for_user = policy.DeprecatedRule( name=base.IDENTITY % 'list_groups_for_user', check_str=base.RULE_ADMIN_OR_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_users_in_group = policy.DeprecatedRule( name=base.IDENTITY % 'list_users_in_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_check_user_in_group = policy.DeprecatedRule( name=base.IDENTITY % 'check_user_in_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_group = policy.DeprecatedRule( name=base.IDENTITY % 'create_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_group = policy.DeprecatedRule( name=base.IDENTITY % 'update_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_group = policy.DeprecatedRule( name=base.IDENTITY % 'delete_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_remove_user_from_group = policy.DeprecatedRule( name=base.IDENTITY % 'remove_user_from_group', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_add_user_to_group = policy.DeprecatedRule( name=base.IDENTITY % 'add_user_to_group', 
check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) group_policies = [ @@ -121,91 +124,106 @@ group_policies = [ check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], description='Show group details.', - operations=[{'path': '/v3/groups/{group_id}', - 'method': 'GET'}, - {'path': '/v3/groups/{group_id}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_group), + operations=[ + {'path': '/v3/groups/{group_id}', 'method': 'GET'}, + {'path': '/v3/groups/{group_id}', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_get_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_groups', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], description='List groups.', - operations=[{'path': '/v3/groups', - 'method': 'GET'}, - {'path': '/v3/groups', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_groups), + operations=[ + {'path': '/v3/groups', 'method': 'GET'}, + {'path': '/v3/groups', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_groups, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_groups_for_user', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_OR_OWNER, scope_types=['system', 'domain', 'project'], description='List groups to which a user belongs.', - operations=[{'path': '/v3/users/{user_id}/groups', - 'method': 'GET'}, - {'path': '/v3/users/{user_id}/groups', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_groups_for_user), + operations=[ + {'path': '/v3/users/{user_id}/groups', 'method': 'GET'}, + {'path': '/v3/users/{user_id}/groups', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_groups_for_user, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_group', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Create 
group.', - operations=[{'path': '/v3/groups', - 'method': 'POST'}], - deprecated_rule=deprecated_create_group), + operations=[{'path': '/v3/groups', 'method': 'POST'}], + deprecated_rule=deprecated_create_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_group', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Update group.', - operations=[{'path': '/v3/groups/{group_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_group), + operations=[{'path': '/v3/groups/{group_id}', 'method': 'PATCH'}], + deprecated_rule=deprecated_update_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_group', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Delete group.', - operations=[{'path': '/v3/groups/{group_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_group), + operations=[{'path': '/v3/groups/{group_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_users_in_group', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], description='List members of a specific group.', - operations=[{'path': '/v3/groups/{group_id}/users', - 'method': 'GET'}, - {'path': '/v3/groups/{group_id}/users', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_users_in_group), + operations=[ + {'path': '/v3/groups/{group_id}/users', 'method': 'GET'}, + {'path': '/v3/groups/{group_id}/users', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_users_in_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'remove_user_from_group', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Remove user from group.', - operations=[{'path': '/v3/groups/{group_id}/users/{user_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_remove_user_from_group), + operations=[ + { 
+ 'path': '/v3/groups/{group_id}/users/{user_id}', + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_remove_user_from_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'check_user_in_group', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP, scope_types=['system', 'domain', 'project'], description='Check whether a user is a member of a group.', - operations=[{'path': '/v3/groups/{group_id}/users/{user_id}', - 'method': 'HEAD'}, - {'path': '/v3/groups/{group_id}/users/{user_id}', - 'method': 'GET'}], - deprecated_rule=deprecated_check_user_in_group), + operations=[ + { + 'path': '/v3/groups/{group_id}/users/{user_id}', + 'method': 'HEAD', + }, + {'path': '/v3/groups/{group_id}/users/{user_id}', 'method': 'GET'}, + ], + deprecated_rule=deprecated_check_user_in_group, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'add_user_to_group', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Add user to group.', - operations=[{'path': '/v3/groups/{group_id}/users/{user_id}', - 'method': 'PUT'}], - deprecated_rule=deprecated_add_user_to_group) + operations=[ + {'path': '/v3/groups/{group_id}/users/{user_id}', 'method': 'PUT'} + ], + deprecated_rule=deprecated_add_user_to_group, + ), ] diff --git a/keystone/common/policies/identity_provider.py b/keystone/common/policies/identity_provider.py index 5bbb44f0c7..d866f21074 100644 --- a/keystone/common/policies/identity_provider.py +++ b/keystone/common/policies/identity_provider.py @@ -23,31 +23,31 @@ deprecated_get_idp = policy.DeprecatedRule( name=base.IDENTITY % 'get_identity_provider', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_idp = policy.DeprecatedRule( name=base.IDENTITY % 'list_identity_providers', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - 
deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_idp = policy.DeprecatedRule( name=base.IDENTITY % 'update_identity_provider', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_idp = policy.DeprecatedRule( name=base.IDENTITY % 'create_identity_provider', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_idp = policy.DeprecatedRule( name=base.IDENTITY % 'delete_identity_provider', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -63,23 +63,22 @@ identity_provider_policies = [ # requires modifying configuration files. 
scope_types=['system', 'project'], description='Create identity provider.', - operations=[{'path': '/v3/OS-FEDERATION/identity_providers/{idp_id}', - 'method': 'PUT'}], - deprecated_rule=deprecated_create_idp), + operations=[ + { + 'path': '/v3/OS-FEDERATION/identity_providers/{idp_id}', + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_create_idp, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_identity_providers', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List identity providers.', operations=[ - { - 'path': '/v3/OS-FEDERATION/identity_providers', - 'method': 'GET' - }, - { - 'path': '/v3/OS-FEDERATION/identity_providers', - 'method': 'HEAD' - } + {'path': '/v3/OS-FEDERATION/identity_providers', 'method': 'GET'}, + {'path': '/v3/OS-FEDERATION/identity_providers', 'method': 'HEAD'}, ], deprecated_rule=deprecated_list_idp, ), @@ -91,12 +90,12 @@ identity_provider_policies = [ operations=[ { 'path': '/v3/OS-FEDERATION/identity_providers/{idp_id}', - 'method': 'GET' + 'method': 'GET', }, { 'path': '/v3/OS-FEDERATION/identity_providers/{idp_id}', - 'method': 'HEAD' - } + 'method': 'HEAD', + }, ], deprecated_rule=deprecated_get_idp, ), @@ -105,17 +104,27 @@ identity_provider_policies = [ check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update identity provider.', - operations=[{'path': '/v3/OS-FEDERATION/identity_providers/{idp_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_idp), + operations=[ + { + 'path': '/v3/OS-FEDERATION/identity_providers/{idp_id}', + 'method': 'PATCH', + } + ], + deprecated_rule=deprecated_update_idp, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_identity_provider', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete identity provider.', - operations=[{'path': '/v3/OS-FEDERATION/identity_providers/{idp_id}', - 'method': 'DELETE'}], - 
deprecated_rule=deprecated_delete_idp), + operations=[ + { + 'path': '/v3/OS-FEDERATION/identity_providers/{idp_id}', + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_idp, + ), ] diff --git a/keystone/common/policies/implied_role.py b/keystone/common/policies/implied_role.py index 63b1a43173..50c648d1c3 100644 --- a/keystone/common/policies/implied_role.py +++ b/keystone/common/policies/implied_role.py @@ -23,37 +23,37 @@ deprecated_get_implied_role = policy.DeprecatedRule( name=base.IDENTITY % 'get_implied_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_implied_roles = policy.DeprecatedRule( name=base.IDENTITY % 'list_implied_roles', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_role_inference_rules = policy.DeprecatedRule( name=base.IDENTITY % 'list_role_inference_rules', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_check_implied_role = policy.DeprecatedRule( name=base.IDENTITY % 'check_implied_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_implied_role = policy.DeprecatedRule( name=base.IDENTITY % 'create_implied_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_implied_role = policy.DeprecatedRule( name=base.IDENTITY % 'delete_implied_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - 
deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -67,76 +67,96 @@ implied_role_policies = [ # files, scope_types should include 'project'. scope_types=['system', 'project'], description='Get information about an association between two roles. ' - 'When a relationship exists between a prior role and an ' - 'implied role and the prior role is assigned to a user, ' - 'the user also assumes the implied role.', + 'When a relationship exists between a prior role and an ' + 'implied role and the prior role is assigned to a user, ' + 'the user also assumes the implied role.', operations=[ - {'path': '/v3/roles/{prior_role_id}/implies/{implied_role_id}', - 'method': 'GET'}], - deprecated_rule=deprecated_get_implied_role), + { + 'path': '/v3/roles/{prior_role_id}/implies/{implied_role_id}', + 'method': 'GET', + } + ], + deprecated_rule=deprecated_get_implied_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_implied_roles', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List associations between two roles. When a relationship ' - 'exists between a prior role and an implied role and the ' - 'prior role is assigned to a user, the user also assumes ' - 'the implied role. This will return all the implied roles ' - 'that would be assumed by the user who gets the specified ' - 'prior role.', + 'exists between a prior role and an implied role and the ' + 'prior role is assigned to a user, the user also assumes ' + 'the implied role. 
This will return all the implied roles ' + 'that would be assumed by the user who gets the specified ' + 'prior role.', operations=[ {'path': '/v3/roles/{prior_role_id}/implies', 'method': 'GET'}, - {'path': '/v3/roles/{prior_role_id}/implies', 'method': 'HEAD'}], - deprecated_rule=deprecated_list_implied_roles), + {'path': '/v3/roles/{prior_role_id}/implies', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_implied_roles, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_implied_role', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create an association between two roles. When a ' - 'relationship exists between a prior role and an implied ' - 'role and the prior role is assigned to a user, the user ' - 'also assumes the implied role.', + 'relationship exists between a prior role and an implied ' + 'role and the prior role is assigned to a user, the user ' + 'also assumes the implied role.', operations=[ - {'path': '/v3/roles/{prior_role_id}/implies/{implied_role_id}', - 'method': 'PUT'}], - deprecated_rule=deprecated_create_implied_role), + { + 'path': '/v3/roles/{prior_role_id}/implies/{implied_role_id}', + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_create_implied_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_implied_role', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete the association between two roles. When a ' - 'relationship exists between a prior role and an implied ' - 'role and the prior role is assigned to a user, the user ' - 'also assumes the implied role. Removing the association ' - 'will cause that effect to be eliminated.', + 'relationship exists between a prior role and an implied ' + 'role and the prior role is assigned to a user, the user ' + 'also assumes the implied role. 
Removing the association ' + 'will cause that effect to be eliminated.', operations=[ - {'path': '/v3/roles/{prior_role_id}/implies/{implied_role_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_implied_role), + { + 'path': '/v3/roles/{prior_role_id}/implies/{implied_role_id}', + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_implied_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_role_inference_rules', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List all associations between two roles in the system. ' - 'When a relationship exists between a prior role and an ' - 'implied role and the prior role is assigned to a user, ' - 'the user also assumes the implied role.', + 'When a relationship exists between a prior role and an ' + 'implied role and the prior role is assigned to a user, ' + 'the user also assumes the implied role.', operations=[ {'path': '/v3/role_inferences', 'method': 'GET'}, - {'path': '/v3/role_inferences', 'method': 'HEAD'}], - deprecated_rule=deprecated_list_role_inference_rules), + {'path': '/v3/role_inferences', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_role_inference_rules, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'check_implied_role', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Check an association between two roles. 
When a ' - 'relationship exists between a prior role and an implied ' - 'role and the prior role is assigned to a user, the user ' - 'also assumes the implied role.', + 'relationship exists between a prior role and an implied ' + 'role and the prior role is assigned to a user, the user ' + 'also assumes the implied role.', operations=[ - {'path': '/v3/roles/{prior_role_id}/implies/{implied_role_id}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_check_implied_role), + { + 'path': '/v3/roles/{prior_role_id}/implies/{implied_role_id}', + 'method': 'HEAD', + } + ], + deprecated_rule=deprecated_check_implied_role, + ), ] diff --git a/keystone/common/policies/limit.py b/keystone/common/policies/limit.py index 78ee90968f..57bec21f09 100644 --- a/keystone/common/policies/limit.py +++ b/keystone/common/policies/limit.py @@ -33,49 +33,52 @@ limit_policies = [ check_str='', scope_types=['system', 'domain', 'project'], description='Get limit enforcement model.', - operations=[{'path': '/v3/limits/model', - 'method': 'GET'}, - {'path': '/v3/limits/model', - 'method': 'HEAD'}]), + operations=[ + {'path': '/v3/limits/model', 'method': 'GET'}, + {'path': '/v3/limits/model', 'method': 'HEAD'}, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_limit', check_str=ADMIN_OR_SYSTEM_OR_DOMAIN_OR_PROJECT_USER, scope_types=['system', 'domain', 'project'], description='Show limit details.', - operations=[{'path': '/v3/limits/{limit_id}', - 'method': 'GET'}, - {'path': '/v3/limits/{limit_id}', - 'method': 'HEAD'}]), + operations=[ + {'path': '/v3/limits/{limit_id}', 'method': 'GET'}, + {'path': '/v3/limits/{limit_id}', 'method': 'HEAD'}, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_limits', check_str='', scope_types=['system', 'domain', 'project'], description='List limits.', - operations=[{'path': '/v3/limits', - 'method': 'GET'}, - {'path': '/v3/limits', - 'method': 'HEAD'}]), + operations=[ + {'path': '/v3/limits', 'method': 'GET'}, + {'path': 
'/v3/limits', 'method': 'HEAD'}, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_limits', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create limits.', - operations=[{'path': '/v3/limits', - 'method': 'POST'}]), + operations=[{'path': '/v3/limits', 'method': 'POST'}], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_limit', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update limit.', - operations=[{'path': '/v3/limits/{limit_id}', - 'method': 'PATCH'}]), + operations=[{'path': '/v3/limits/{limit_id}', 'method': 'PATCH'}], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_limit', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete limit.', - operations=[{'path': '/v3/limits/{limit_id}', - 'method': 'DELETE'}]) + operations=[{'path': '/v3/limits/{limit_id}', 'method': 'DELETE'}], + ), ] diff --git a/keystone/common/policies/mapping.py b/keystone/common/policies/mapping.py index 472cc21ff4..2f77aab265 100644 --- a/keystone/common/policies/mapping.py +++ b/keystone/common/policies/mapping.py @@ -23,31 +23,31 @@ deprecated_get_mapping = policy.DeprecatedRule( name=base.IDENTITY % 'get_mapping', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_mappings = policy.DeprecatedRule( name=base.IDENTITY % 'list_mappings', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_mapping = policy.DeprecatedRule( name=base.IDENTITY % 'update_mapping', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_mapping 
= policy.DeprecatedRule( name=base.IDENTITY % 'create_mapping', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_mapping = policy.DeprecatedRule( name=base.IDENTITY % 'delete_mapping', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -56,11 +56,18 @@ mapping_policies = [ name=base.IDENTITY % 'create_mapping', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], - description=('Create a new federated mapping containing one or ' - 'more sets of rules.'), - operations=[{'path': '/v3/OS-FEDERATION/mappings/{mapping_id}', - 'method': 'PUT'}], - deprecated_rule=deprecated_create_mapping), + description=( + 'Create a new federated mapping containing one or ' + 'more sets of rules.' + ), + operations=[ + { + 'path': '/v3/OS-FEDERATION/mappings/{mapping_id}', + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_create_mapping, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_mapping', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, @@ -69,14 +76,14 @@ mapping_policies = [ operations=[ { 'path': '/v3/OS-FEDERATION/mappings/{mapping_id}', - 'method': 'GET' + 'method': 'GET', }, { 'path': '/v3/OS-FEDERATION/mappings/{mapping_id}', - 'method': 'HEAD' - } + 'method': 'HEAD', + }, ], - deprecated_rule=deprecated_get_mapping + deprecated_rule=deprecated_get_mapping, ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_mappings', @@ -84,14 +91,8 @@ mapping_policies = [ scope_types=['system', 'project'], description='List federated mappings.', operations=[ - { - 'path': '/v3/OS-FEDERATION/mappings', - 'method': 'GET' - }, - { - 'path': '/v3/OS-FEDERATION/mappings', - 'method': 'HEAD' - } + {'path': '/v3/OS-FEDERATION/mappings', 'method': 'GET'}, + {'path': 
'/v3/OS-FEDERATION/mappings', 'method': 'HEAD'}, ], deprecated_rule=deprecated_list_mappings, ), @@ -100,17 +101,27 @@ mapping_policies = [ check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete a federated mapping.', - operations=[{'path': '/v3/OS-FEDERATION/mappings/{mapping_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_mapping), + operations=[ + { + 'path': '/v3/OS-FEDERATION/mappings/{mapping_id}', + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_mapping, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_mapping', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update a federated mapping.', - operations=[{'path': '/v3/OS-FEDERATION/mappings/{mapping_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_mapping) + operations=[ + { + 'path': '/v3/OS-FEDERATION/mappings/{mapping_id}', + 'method': 'PATCH', + } + ], + deprecated_rule=deprecated_update_mapping, + ), ] diff --git a/keystone/common/policies/policy.py b/keystone/common/policies/policy.py index 645fff71d7..a36dfb4d77 100644 --- a/keystone/common/policies/policy.py +++ b/keystone/common/policies/policy.py @@ -23,35 +23,35 @@ deprecated_get_policy = policy.DeprecatedRule( name=base.IDENTITY % 'get_policy', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_policies = policy.DeprecatedRule( name=base.IDENTITY % 'list_policies', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_update_policy = policy.DeprecatedRule( name=base.IDENTITY % 'update_policy', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + 
deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_policy = policy.DeprecatedRule( name=base.IDENTITY % 'create_policy', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_policy = policy.DeprecatedRule( name=base.IDENTITY % 'delete_policy', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -63,41 +63,41 @@ policy_policies = [ # More-or-less adding scope_types to be consistent with other policies. scope_types=['system', 'project'], description='Show policy details.', - operations=[{'path': '/v3/policies/{policy_id}', - 'method': 'GET'}], - deprecated_rule=deprecated_get_policy), + operations=[{'path': '/v3/policies/{policy_id}', 'method': 'GET'}], + deprecated_rule=deprecated_get_policy, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_policies', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List policies.', - operations=[{'path': '/v3/policies', - 'method': 'GET'}], - deprecated_rule=deprecated_list_policies), + operations=[{'path': '/v3/policies', 'method': 'GET'}], + deprecated_rule=deprecated_list_policies, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_policy', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create policy.', - operations=[{'path': '/v3/policies', - 'method': 'POST'}], - deprecated_rule=deprecated_create_policy), + operations=[{'path': '/v3/policies', 'method': 'POST'}], + deprecated_rule=deprecated_create_policy, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_policy', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update policy.', - operations=[{'path': '/v3/policies/{policy_id}', - 'method': 
'PATCH'}], - deprecated_rule=deprecated_update_policy), + operations=[{'path': '/v3/policies/{policy_id}', 'method': 'PATCH'}], + deprecated_rule=deprecated_update_policy, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_policy', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete policy.', - operations=[{'path': '/v3/policies/{policy_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_policy) + operations=[{'path': '/v3/policies/{policy_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_policy, + ), ] diff --git a/keystone/common/policies/policy_association.py b/keystone/common/policies/policy_association.py index 9840fc272b..3e188a7f3f 100644 --- a/keystone/common/policies/policy_association.py +++ b/keystone/common/policies/policy_association.py @@ -28,77 +28,77 @@ deprecated_check_policy_assoc_for_endpoint = policy.DeprecatedRule( name=base.IDENTITY % 'check_policy_association_for_endpoint', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_check_policy_assoc_for_service = policy.DeprecatedRule( name=base.IDENTITY % 'check_policy_association_for_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_check_policy_assoc_for_region_and_service = policy.DeprecatedRule( name=base.IDENTITY % 'check_policy_association_for_region_and_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_get_policy_for_endpoint = policy.DeprecatedRule( name=base.IDENTITY % 'get_policy_for_endpoint', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - 
deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_endpoints_for_policy = policy.DeprecatedRule( name=base.IDENTITY % 'list_endpoints_for_policy', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_policy_assoc_for_endpoint = policy.DeprecatedRule( name=base.IDENTITY % 'create_policy_association_for_endpoint', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_policy_assoc_for_endpoint = policy.DeprecatedRule( name=base.IDENTITY % 'delete_policy_association_for_endpoint', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_policy_assoc_for_service = policy.DeprecatedRule( name=base.IDENTITY % 'create_policy_association_for_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_policy_assoc_for_service = policy.DeprecatedRule( name=base.IDENTITY % 'delete_policy_association_for_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_policy_assoc_for_region_and_service = policy.DeprecatedRule( name=base.IDENTITY % 'create_policy_association_for_region_and_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_policy_assoc_for_region_and_service = 
policy.DeprecatedRule( name=base.IDENTITY % 'delete_policy_association_for_region_and_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -108,115 +108,207 @@ policy_association_policies = [ check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Associate a policy to a specific endpoint.', - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'endpoints/{endpoint_id}'), - 'method': 'PUT'}], - deprecated_rule=deprecated_create_policy_assoc_for_endpoint), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'endpoints/{endpoint_id}' + ), + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_create_policy_assoc_for_endpoint, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'check_policy_association_for_endpoint', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Check policy association for endpoint.', - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'endpoints/{endpoint_id}'), - 'method': 'GET'}, - {'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'endpoints/{endpoint_id}'), - 'method': 'HEAD'}], - deprecated_rule=deprecated_check_policy_assoc_for_endpoint), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'endpoints/{endpoint_id}' + ), + 'method': 'GET', + }, + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'endpoints/{endpoint_id}' + ), + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_check_policy_assoc_for_endpoint, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_policy_association_for_endpoint', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete policy association for endpoint.', - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 
'endpoints/{endpoint_id}'), - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_policy_assoc_for_endpoint), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'endpoints/{endpoint_id}' + ), + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_policy_assoc_for_endpoint, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_policy_association_for_service', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Associate a policy to a specific service.', - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}'), - 'method': 'PUT'}], - deprecated_rule=deprecated_create_policy_assoc_for_service), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'services/{service_id}' + ), + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_create_policy_assoc_for_service, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'check_policy_association_for_service', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Check policy association for service.', - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}'), - 'method': 'GET'}, - {'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}'), - 'method': 'HEAD'}], - deprecated_rule=deprecated_check_policy_assoc_for_service), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'services/{service_id}' + ), + 'method': 'GET', + }, + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'services/{service_id}' + ), + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_check_policy_assoc_for_service, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_policy_association_for_service', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete policy association for service.', - 
operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}'), - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_policy_assoc_for_service), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'services/{service_id}' + ), + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_policy_assoc_for_service, + ), policy.DocumentedRuleDefault( - name=base.IDENTITY % ( - 'create_policy_association_for_region_and_service'), + name=base.IDENTITY + % ('create_policy_association_for_region_and_service'), check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], - description=('Associate a policy to a specific region and service ' - 'combination.'), - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}/regions/{region_id}'), - 'method': 'PUT'}], - deprecated_rule=deprecated_create_policy_assoc_for_region_and_service), + description=( + 'Associate a policy to a specific region and service ' + 'combination.' 
+ ), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'services/{service_id}/regions/{region_id}' + ), + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_create_policy_assoc_for_region_and_service, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'check_policy_association_for_region_and_service', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Check policy association for region and service.', - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}/regions/{region_id}'), - 'method': 'GET'}, - {'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}/regions/{region_id}'), - 'method': 'HEAD'}], - deprecated_rule=deprecated_check_policy_assoc_for_region_and_service), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'services/{service_id}/regions/{region_id}' + ), + 'method': 'GET', + }, + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'services/{service_id}/regions/{region_id}' + ), + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_check_policy_assoc_for_region_and_service, + ), policy.DocumentedRuleDefault( - name=base.IDENTITY % ( - 'delete_policy_association_for_region_and_service'), + name=base.IDENTITY + % ('delete_policy_association_for_region_and_service'), check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete policy association for region and service.', - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}/regions/{region_id}'), - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_policy_assoc_for_region_and_service), + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' + 'services/{service_id}/regions/{region_id}' + ), + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_policy_assoc_for_region_and_service, + ), 
policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_policy_for_endpoint', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Get policy for endpoint.', - operations=[{'path': ('/v3/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/' - 'policy'), - 'method': 'GET'}, - {'path': ('/v3/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/' - 'policy'), - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_policy_for_endpoint), + operations=[ + { + 'path': ( + '/v3/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/' 'policy' + ), + 'method': 'GET', + }, + { + 'path': ( + '/v3/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/' 'policy' + ), + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_get_policy_for_endpoint, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_endpoints_for_policy', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List endpoints for policy.', - operations=[{'path': ('/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'endpoints'), - 'method': 'GET'}], - deprecated_rule=deprecated_list_endpoints_for_policy) + operations=[ + { + 'path': ( + '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' 'endpoints' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_list_endpoints_for_policy, + ), ] diff --git a/keystone/common/policies/project.py b/keystone/common/policies/project.py index 8796b9c388..c36e58da77 100644 --- a/keystone/common/policies/project.py +++ b/keystone/common/policies/project.py @@ -21,8 +21,10 @@ SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER = ( 'project_id:%(target.project.id)s' ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER + '(' + + base.RULE_ADMIN_REQUIRED + + ') or ' + + SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER ) SYSTEM_ADMIN_OR_DOMAIN_ADMIN_OR_PROJECT_ADMIN = ( @@ -38,7 +40,9 @@ SYSTEM_ADMIN_OR_DOMAIN_ADMIN_OR_PROJECT_ADMIN = ( # 
/v3/users/{user_id}/project path. SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER = ( # System reader policy - '(' + base.SYSTEM_READER + ') or ' + '(' + + base.SYSTEM_READER + + ') or ' # Domain reader policy '(role:reader and domain_id:%(target.user.domain_id)s) or ' # User accessing the API with a token they've obtained, matching @@ -46,8 +50,10 @@ SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER = ( 'user_id:%(target.user.id)s' ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER + '(' + + base.RULE_ADMIN_REQUIRED + + ') or ' + + SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER ) SYSTEM_READER_OR_DOMAIN_READER = ( @@ -56,8 +62,7 @@ SYSTEM_READER_OR_DOMAIN_READER = ( ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER + '(' + base.RULE_ADMIN_REQUIRED + ') or ' + SYSTEM_READER_OR_DOMAIN_READER ) SYSTEM_ADMIN_OR_DOMAIN_ADMIN = ( @@ -73,73 +78,73 @@ deprecated_list_projects = policy.DeprecatedRule( name=base.IDENTITY % 'list_projects', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_get_project = policy.DeprecatedRule( name=base.IDENTITY % 'get_project', check_str=base.RULE_ADMIN_OR_TARGET_PROJECT, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_user_projects = policy.DeprecatedRule( name=base.IDENTITY % 'list_user_projects', check_str=base.RULE_ADMIN_OR_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_project = policy.DeprecatedRule( name=base.IDENTITY % 'create_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - 
deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_project = policy.DeprecatedRule( name=base.IDENTITY % 'update_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_project = policy.DeprecatedRule( name=base.IDENTITY % 'delete_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_project_tags = policy.DeprecatedRule( name=base.IDENTITY % 'list_project_tags', check_str=base.RULE_ADMIN_OR_TARGET_PROJECT, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_get_project_tag = policy.DeprecatedRule( name=base.IDENTITY % 'get_project_tag', check_str=base.RULE_ADMIN_OR_TARGET_PROJECT, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_update_project_tag = policy.DeprecatedRule( name=base.IDENTITY % 'update_project_tags', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_project_tag = policy.DeprecatedRule( name=base.IDENTITY % 'create_project_tag', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_project_tag = policy.DeprecatedRule( name=base.IDENTITY % 'delete_project_tag', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) 
deprecated_delete_project_tags = policy.DeprecatedRule( name=base.IDENTITY % 'delete_project_tags', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -157,101 +162,120 @@ project_policies = [ check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER, scope_types=['system', 'domain', 'project'], description='Show project details.', - operations=[{'path': '/v3/projects/{project_id}', - 'method': 'GET'}], - deprecated_rule=deprecated_get_project), + operations=[{'path': '/v3/projects/{project_id}', 'method': 'GET'}], + deprecated_rule=deprecated_get_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_projects', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], description='List projects.', - operations=[{'path': '/v3/projects', - 'method': 'GET'}], - deprecated_rule=deprecated_list_projects), + operations=[{'path': '/v3/projects', 'method': 'GET'}], + deprecated_rule=deprecated_list_projects, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_user_projects', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER, scope_types=['system', 'domain', 'project'], description='List projects for user.', - operations=[{'path': '/v3/users/{user_id}/projects', - 'method': 'GET'}], - deprecated_rule=deprecated_list_user_projects), + operations=[{'path': '/v3/users/{user_id}/projects', 'method': 'GET'}], + deprecated_rule=deprecated_list_user_projects, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_project', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Create project.', - operations=[{'path': '/v3/projects', - 'method': 'POST'}], - deprecated_rule=deprecated_create_project), + operations=[{'path': '/v3/projects', 'method': 'POST'}], + deprecated_rule=deprecated_create_project, + ), 
policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_project', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Update project.', - operations=[{'path': '/v3/projects/{project_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_project), + operations=[{'path': '/v3/projects/{project_id}', 'method': 'PATCH'}], + deprecated_rule=deprecated_update_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_project', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Delete project.', - operations=[{'path': '/v3/projects/{project_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_project), + operations=[{'path': '/v3/projects/{project_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_project_tags', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER, scope_types=['system', 'domain', 'project'], description='List tags for a project.', - operations=[{'path': '/v3/projects/{project_id}/tags', - 'method': 'GET'}, - {'path': '/v3/projects/{project_id}/tags', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_project_tags), + operations=[ + {'path': '/v3/projects/{project_id}/tags', 'method': 'GET'}, + {'path': '/v3/projects/{project_id}/tags', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_project_tags, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_project_tag', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER, scope_types=['system', 'domain', 'project'], description='Check if project contains a tag.', - operations=[{'path': '/v3/projects/{project_id}/tags/{value}', - 'method': 'GET'}, - {'path': '/v3/projects/{project_id}/tags/{value}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_project_tag), + operations=[ + { + 'path': '/v3/projects/{project_id}/tags/{value}', + 'method': 'GET', + 
}, + { + 'path': '/v3/projects/{project_id}/tags/{value}', + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_get_project_tag, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_project_tags', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Replace all tags on a project with the new set of tags.', - operations=[{'path': '/v3/projects/{project_id}/tags', - 'method': 'PUT'}], - deprecated_rule=deprecated_update_project_tag), + operations=[ + {'path': '/v3/projects/{project_id}/tags', 'method': 'PUT'} + ], + deprecated_rule=deprecated_update_project_tag, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_project_tag', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Add a single tag to a project.', - operations=[{'path': '/v3/projects/{project_id}/tags/{value}', - 'method': 'PUT'}], - deprecated_rule=deprecated_create_project_tag), + operations=[ + {'path': '/v3/projects/{project_id}/tags/{value}', 'method': 'PUT'} + ], + deprecated_rule=deprecated_create_project_tag, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_project_tags', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Remove all tags from a project.', - operations=[{'path': '/v3/projects/{project_id}/tags', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_project_tags), + operations=[ + {'path': '/v3/projects/{project_id}/tags', 'method': 'DELETE'} + ], + deprecated_rule=deprecated_delete_project_tags, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_project_tag', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Delete a specified tag from project.', - operations=[{'path': '/v3/projects/{project_id}/tags/{value}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_project_tag) + operations=[ + { + 'path': '/v3/projects/{project_id}/tags/{value}', 
+ 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_project_tag, + ), ] diff --git a/keystone/common/policies/project_endpoint.py b/keystone/common/policies/project_endpoint.py index 122363e9f2..3e001f415f 100644 --- a/keystone/common/policies/project_endpoint.py +++ b/keystone/common/policies/project_endpoint.py @@ -27,89 +27,127 @@ deprecated_list_projects_for_endpoint = policy.DeprecatedRule( name=base.IDENTITY % 'list_projects_for_endpoint', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_add_endpoint_to_project = policy.DeprecatedRule( name=base.IDENTITY % 'add_endpoint_to_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_check_endpoint_in_project = policy.DeprecatedRule( name=base.IDENTITY % 'check_endpoint_in_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_endpoints_for_project = policy.DeprecatedRule( name=base.IDENTITY % 'list_endpoints_for_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_remove_endpoint_from_project = policy.DeprecatedRule( name=base.IDENTITY % 'remove_endpoint_from_project', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) project_endpoint_policies = [ - policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_projects_for_endpoint', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List 
projects allowed to access an endpoint.', - operations=[{'path': ('/v3/OS-EP-FILTER/endpoints/{endpoint_id}/' - 'projects'), - 'method': 'GET'}], - deprecated_rule=deprecated_list_projects_for_endpoint), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/endpoints/{endpoint_id}/' 'projects' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_list_projects_for_endpoint, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'add_endpoint_to_project', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Allow project to access an endpoint.', - operations=[{'path': ('/v3/OS-EP-FILTER/projects/{project_id}/' - 'endpoints/{endpoint_id}'), - 'method': 'PUT'}], - deprecated_rule=deprecated_add_endpoint_to_project), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/projects/{project_id}/' + 'endpoints/{endpoint_id}' + ), + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_add_endpoint_to_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'check_endpoint_in_project', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Check if a project is allowed to access an endpoint.', - operations=[{'path': ('/v3/OS-EP-FILTER/projects/{project_id}/' - 'endpoints/{endpoint_id}'), - 'method': 'GET'}, - {'path': ('/v3/OS-EP-FILTER/projects/{project_id}/' - 'endpoints/{endpoint_id}'), - 'method': 'HEAD'}], - deprecated_rule=deprecated_check_endpoint_in_project), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/projects/{project_id}/' + 'endpoints/{endpoint_id}' + ), + 'method': 'GET', + }, + { + 'path': ( + '/v3/OS-EP-FILTER/projects/{project_id}/' + 'endpoints/{endpoint_id}' + ), + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_check_endpoint_in_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_endpoints_for_project', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List the endpoints a project is allowed to 
access.', - operations=[{'path': ('/v3/OS-EP-FILTER/projects/{project_id}/' - 'endpoints'), - 'method': 'GET'}], - deprecated_rule=deprecated_list_endpoints_for_project), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/projects/{project_id}/' 'endpoints' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_list_endpoints_for_project, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'remove_endpoint_from_project', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], - description=('Remove access to an endpoint from a project that has ' - 'previously been given explicit access.'), - operations=[{'path': ('/v3/OS-EP-FILTER/projects/{project_id}/' - 'endpoints/{endpoint_id}'), - 'method': 'DELETE'}], - deprecated_rule=deprecated_remove_endpoint_from_project), + description=( + 'Remove access to an endpoint from a project that has ' + 'previously been given explicit access.' + ), + operations=[ + { + 'path': ( + '/v3/OS-EP-FILTER/projects/{project_id}/' + 'endpoints/{endpoint_id}' + ), + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_remove_endpoint_from_project, + ), ] diff --git a/keystone/common/policies/protocol.py b/keystone/common/policies/protocol.py index 8a4c69624f..6f9a4c1e2a 100644 --- a/keystone/common/policies/protocol.py +++ b/keystone/common/policies/protocol.py @@ -24,31 +24,31 @@ deprecated_get_protocol = policy.DeprecatedRule( name=base.IDENTITY % 'get_protocol', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_protocols = policy.DeprecatedRule( name=base.IDENTITY % 'list_protocols', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_protocol = policy.DeprecatedRule( name=base.IDENTITY % 'update_protocol', 
check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_protocol = policy.DeprecatedRule( name=base.IDENTITY % 'create_protocol', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_protocol = policy.DeprecatedRule( name=base.IDENTITY % 'delete_protocol', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -58,46 +58,81 @@ protocol_policies = [ check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create federated protocol.', - operations=[{'path': ('/v3/OS-FEDERATION/identity_providers/{idp_id}/' - 'protocols/{protocol_id}'), - 'method': 'PUT'}], - deprecated_rule=deprecated_create_protocol), + operations=[ + { + 'path': ( + '/v3/OS-FEDERATION/identity_providers/{idp_id}/' + 'protocols/{protocol_id}' + ), + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_create_protocol, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_protocol', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update federated protocol.', - operations=[{'path': ('/v3/OS-FEDERATION/identity_providers/{idp_id}/' - 'protocols/{protocol_id}'), - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_protocol), + operations=[ + { + 'path': ( + '/v3/OS-FEDERATION/identity_providers/{idp_id}/' + 'protocols/{protocol_id}' + ), + 'method': 'PATCH', + } + ], + deprecated_rule=deprecated_update_protocol, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_protocol', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Get federated protocol.', - operations=[{'path': 
('/v3/OS-FEDERATION/identity_providers/{idp_id}/' - 'protocols/{protocol_id}'), - 'method': 'GET'}], - deprecated_rule=deprecated_get_protocol), + operations=[ + { + 'path': ( + '/v3/OS-FEDERATION/identity_providers/{idp_id}/' + 'protocols/{protocol_id}' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_get_protocol, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_protocols', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List federated protocols.', - operations=[{'path': ('/v3/OS-FEDERATION/identity_providers/{idp_id}/' - 'protocols'), - 'method': 'GET'}], - deprecated_rule=deprecated_list_protocols), + operations=[ + { + 'path': ( + '/v3/OS-FEDERATION/identity_providers/{idp_id}/' + 'protocols' + ), + 'method': 'GET', + } + ], + deprecated_rule=deprecated_list_protocols, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_protocol', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete federated protocol.', - operations=[{'path': ('/v3/OS-FEDERATION/identity_providers/{idp_id}/' - 'protocols/{protocol_id}'), - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_protocol) + operations=[ + { + 'path': ( + '/v3/OS-FEDERATION/identity_providers/{idp_id}/' + 'protocols/{protocol_id}' + ), + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_protocol, + ), ] diff --git a/keystone/common/policies/region.py b/keystone/common/policies/region.py index c24f588680..77cade21e6 100644 --- a/keystone/common/policies/region.py +++ b/keystone/common/policies/region.py @@ -23,19 +23,19 @@ deprecated_create_region = policy.DeprecatedRule( name=base.IDENTITY % 'create_region', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_region = policy.DeprecatedRule( name=base.IDENTITY % 'update_region', 
check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_region = policy.DeprecatedRule( name=base.IDENTITY % 'delete_region', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -51,45 +51,48 @@ region_policies = [ # `system` scope. scope_types=['system', 'domain', 'project'], description='Show region details.', - operations=[{'path': '/v3/regions/{region_id}', - 'method': 'GET'}, - {'path': '/v3/regions/{region_id}', - 'method': 'HEAD'}]), + operations=[ + {'path': '/v3/regions/{region_id}', 'method': 'GET'}, + {'path': '/v3/regions/{region_id}', 'method': 'HEAD'}, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_regions', check_str='', scope_types=['system', 'domain', 'project'], description='List regions.', - operations=[{'path': '/v3/regions', - 'method': 'GET'}, - {'path': '/v3/regions', - 'method': 'HEAD'}]), + operations=[ + {'path': '/v3/regions', 'method': 'GET'}, + {'path': '/v3/regions', 'method': 'HEAD'}, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_region', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create region.', - operations=[{'path': '/v3/regions', - 'method': 'POST'}, - {'path': '/v3/regions/{region_id}', - 'method': 'PUT'}], - deprecated_rule=deprecated_create_region), + operations=[ + {'path': '/v3/regions', 'method': 'POST'}, + {'path': '/v3/regions/{region_id}', 'method': 'PUT'}, + ], + deprecated_rule=deprecated_create_region, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_region', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update region.', - operations=[{'path': '/v3/regions/{region_id}', - 'method': 'PATCH'}], - 
deprecated_rule=deprecated_update_region), + operations=[{'path': '/v3/regions/{region_id}', 'method': 'PATCH'}], + deprecated_rule=deprecated_update_region, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_region', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete region.', - operations=[{'path': '/v3/regions/{region_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_region), + operations=[{'path': '/v3/regions/{region_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_region, + ), ] diff --git a/keystone/common/policies/registered_limit.py b/keystone/common/policies/registered_limit.py index 192984b92d..ea5130e043 100644 --- a/keystone/common/policies/registered_limit.py +++ b/keystone/common/policies/registered_limit.py @@ -20,40 +20,58 @@ registered_limit_policies = [ check_str='', scope_types=['system', 'domain', 'project'], description='Show registered limit details.', - operations=[{'path': '/v3/registered_limits/{registered_limit_id}', - 'method': 'GET'}, - {'path': '/v3/registered_limits/{registered_limit_id}', - 'method': 'HEAD'}]), + operations=[ + { + 'path': '/v3/registered_limits/{registered_limit_id}', + 'method': 'GET', + }, + { + 'path': '/v3/registered_limits/{registered_limit_id}', + 'method': 'HEAD', + }, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_registered_limits', check_str='', scope_types=['system', 'domain', 'project'], description='List registered limits.', - operations=[{'path': '/v3/registered_limits', - 'method': 'GET'}, - {'path': '/v3/registered_limits', - 'method': 'HEAD'}]), + operations=[ + {'path': '/v3/registered_limits', 'method': 'GET'}, + {'path': '/v3/registered_limits', 'method': 'HEAD'}, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_registered_limits', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create registered limits.', - operations=[{'path': 
'/v3/registered_limits', - 'method': 'POST'}]), + operations=[{'path': '/v3/registered_limits', 'method': 'POST'}], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_registered_limit', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update registered limit.', - operations=[{'path': '/v3/registered_limits/{registered_limit_id}', - 'method': 'PATCH'}]), + operations=[ + { + 'path': '/v3/registered_limits/{registered_limit_id}', + 'method': 'PATCH', + } + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_registered_limit', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete registered limit.', - operations=[{'path': '/v3/registered_limits/{registered_limit_id}', - 'method': 'DELETE'}]) + operations=[ + { + 'path': '/v3/registered_limits/{registered_limit_id}', + 'method': 'DELETE', + } + ], + ), ] diff --git a/keystone/common/policies/revoke_event.py b/keystone/common/policies/revoke_event.py index db55350f00..4920d8d227 100644 --- a/keystone/common/policies/revoke_event.py +++ b/keystone/common/policies/revoke_event.py @@ -20,8 +20,8 @@ revoke_event_policies = [ check_str=base.RULE_SERVICE_OR_ADMIN, scope_types=['system', 'project'], description='List revocation events.', - operations=[{'path': '/v3/OS-REVOKE/events', - 'method': 'GET'}]) + operations=[{'path': '/v3/OS-REVOKE/events', 'method': 'GET'}], + ) ] diff --git a/keystone/common/policies/role.py b/keystone/common/policies/role.py index a5ebd2647f..ab7fe577a7 100644 --- a/keystone/common/policies/role.py +++ b/keystone/common/policies/role.py @@ -23,61 +23,61 @@ deprecated_get_role = policy.DeprecatedRule( name=base.IDENTITY % 'get_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_role = policy.DeprecatedRule( name=base.IDENTITY % 'list_roles', 
check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_role = policy.DeprecatedRule( name=base.IDENTITY % 'update_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_role = policy.DeprecatedRule( name=base.IDENTITY % 'create_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_role = policy.DeprecatedRule( name=base.IDENTITY % 'delete_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_get_domain_role = policy.DeprecatedRule( name=base.IDENTITY % 'get_domain_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_domain_roles = policy.DeprecatedRule( name=base.IDENTITY % 'list_domain_roles', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_update_domain_role = policy.DeprecatedRule( name=base.IDENTITY % 'update_domain_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_create_domain_role = policy.DeprecatedRule( name=base.IDENTITY % 'create_domain_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + 
deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_domain_role = policy.DeprecatedRule( name=base.IDENTITY % 'delete_domain_role', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) @@ -87,89 +87,93 @@ role_policies = [ check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'domain', 'project'], description='Show role details.', - operations=[{'path': '/v3/roles/{role_id}', - 'method': 'GET'}, - {'path': '/v3/roles/{role_id}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_role), + operations=[ + {'path': '/v3/roles/{role_id}', 'method': 'GET'}, + {'path': '/v3/roles/{role_id}', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_get_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_roles', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'domain', 'project'], description='List roles.', - operations=[{'path': '/v3/roles', - 'method': 'GET'}, - {'path': '/v3/roles', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_role), + operations=[ + {'path': '/v3/roles', 'method': 'GET'}, + {'path': '/v3/roles', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_role', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create role.', - operations=[{'path': '/v3/roles', - 'method': 'POST'}], - deprecated_rule=deprecated_create_role), + operations=[{'path': '/v3/roles', 'method': 'POST'}], + deprecated_rule=deprecated_create_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_role', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update role.', - operations=[{'path': '/v3/roles/{role_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_role), + operations=[{'path': '/v3/roles/{role_id}', 'method': 
'PATCH'}], + deprecated_rule=deprecated_update_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_role', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete role.', - operations=[{'path': '/v3/roles/{role_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_role), + operations=[{'path': '/v3/roles/{role_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_domain_role', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Show domain role.', - operations=[{'path': '/v3/roles/{role_id}', - 'method': 'GET'}, - {'path': '/v3/roles/{role_id}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_domain_role), + operations=[ + {'path': '/v3/roles/{role_id}', 'method': 'GET'}, + {'path': '/v3/roles/{role_id}', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_get_domain_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_domain_roles', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, description='List domain roles.', scope_types=['system', 'project'], - operations=[{'path': '/v3/roles?domain_id={domain_id}', - 'method': 'GET'}, - {'path': '/v3/roles?domain_id={domain_id}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_domain_roles), + operations=[ + {'path': '/v3/roles?domain_id={domain_id}', 'method': 'GET'}, + {'path': '/v3/roles?domain_id={domain_id}', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_domain_roles, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_domain_role', check_str=base.RULE_ADMIN_REQUIRED, description='Create domain role.', scope_types=['system', 'project'], - operations=[{'path': '/v3/roles', - 'method': 'POST'}], - deprecated_rule=deprecated_create_domain_role), + operations=[{'path': '/v3/roles', 'method': 'POST'}], + deprecated_rule=deprecated_create_domain_role, + ), policy.DocumentedRuleDefault( 
name=base.IDENTITY % 'update_domain_role', check_str=base.RULE_ADMIN_REQUIRED, description='Update domain role.', scope_types=['system', 'project'], - operations=[{'path': '/v3/roles/{role_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_domain_role), + operations=[{'path': '/v3/roles/{role_id}', 'method': 'PATCH'}], + deprecated_rule=deprecated_update_domain_role, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_domain_role', check_str=base.RULE_ADMIN_REQUIRED, description='Delete domain role.', scope_types=['system', 'project'], - operations=[{'path': '/v3/roles/{role_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_domain_role) + operations=[{'path': '/v3/roles/{role_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_domain_role, + ), ] diff --git a/keystone/common/policies/role_assignment.py b/keystone/common/policies/role_assignment.py index f2288fff4b..76c918271d 100644 --- a/keystone/common/policies/role_assignment.py +++ b/keystone/common/policies/role_assignment.py @@ -20,8 +20,7 @@ SYSTEM_READER_OR_DOMAIN_READER = ( '(role:reader and domain_id:%(target.domain_id)s)' ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER + '(' + base.RULE_ADMIN_REQUIRED + ') or ' + SYSTEM_READER_OR_DOMAIN_READER ) SYSTEM_READER_OR_PROJECT_DOMAIN_READER_OR_PROJECT_ADMIN = ( @@ -38,13 +37,13 @@ deprecated_list_role_assignments = policy.DeprecatedRule( name=base.IDENTITY % 'list_role_assignments', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_role_assignments_for_tree = policy.DeprecatedRule( name=base.IDENTITY % 'list_role_assignments_for_tree', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + 
deprecated_since=versionutils.deprecated.TRAIN, ) @@ -54,23 +53,26 @@ role_assignment_policies = [ check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], description='List role assignments.', - operations=[{'path': '/v3/role_assignments', - 'method': 'GET'}, - {'path': '/v3/role_assignments', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_role_assignments), + operations=[ + {'path': '/v3/role_assignments', 'method': 'GET'}, + {'path': '/v3/role_assignments', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_role_assignments, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_role_assignments_for_tree', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], - description=('List all role assignments for a given tree of ' - 'hierarchical projects.'), - operations=[{'path': '/v3/role_assignments?include_subtree', - 'method': 'GET'}, - {'path': '/v3/role_assignments?include_subtree', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_role_assignments_for_tree), - + description=( + 'List all role assignments for a given tree of ' + 'hierarchical projects.' 
+ ), + operations=[ + {'path': '/v3/role_assignments?include_subtree', 'method': 'GET'}, + {'path': '/v3/role_assignments?include_subtree', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_role_assignments_for_tree, + ), ] diff --git a/keystone/common/policies/service.py b/keystone/common/policies/service.py index f65f9f5a48..7911549267 100644 --- a/keystone/common/policies/service.py +++ b/keystone/common/policies/service.py @@ -23,31 +23,31 @@ deprecated_get_service = policy.DeprecatedRule( name=base.IDENTITY % 'get_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_service = policy.DeprecatedRule( name=base.IDENTITY % 'list_services', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_service = policy.DeprecatedRule( name=base.IDENTITY % 'update_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_service = policy.DeprecatedRule( name=base.IDENTITY % 'create_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_service = policy.DeprecatedRule( name=base.IDENTITY % 'delete_service', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -57,41 +57,41 @@ service_policies = [ check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='Show service details.', - operations=[{'path': '/v3/services/{service_id}', - 'method': 'GET'}], 
- deprecated_rule=deprecated_get_service), + operations=[{'path': '/v3/services/{service_id}', 'method': 'GET'}], + deprecated_rule=deprecated_get_service, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_services', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List services.', - operations=[{'path': '/v3/services', - 'method': 'GET'}], - deprecated_rule=deprecated_list_service), + operations=[{'path': '/v3/services', 'method': 'GET'}], + deprecated_rule=deprecated_list_service, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_service', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create service.', - operations=[{'path': '/v3/services', - 'method': 'POST'}], - deprecated_rule=deprecated_create_service), + operations=[{'path': '/v3/services', 'method': 'POST'}], + deprecated_rule=deprecated_create_service, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_service', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update service.', - operations=[{'path': '/v3/services/{service_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_service), + operations=[{'path': '/v3/services/{service_id}', 'method': 'PATCH'}], + deprecated_rule=deprecated_update_service, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_service', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete service.', - operations=[{'path': '/v3/services/{service_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_service) + operations=[{'path': '/v3/services/{service_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_service, + ), ] diff --git a/keystone/common/policies/service_provider.py b/keystone/common/policies/service_provider.py index db73d0363c..c0de5ffba8 100644 --- a/keystone/common/policies/service_provider.py +++ 
b/keystone/common/policies/service_provider.py @@ -23,31 +23,31 @@ deprecated_get_sp = policy.DeprecatedRule( name=base.IDENTITY % 'get_service_provider', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_sp = policy.DeprecatedRule( name=base.IDENTITY % 'list_service_providers', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_sp = policy.DeprecatedRule( name=base.IDENTITY % 'update_service_provider', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_sp = policy.DeprecatedRule( name=base.IDENTITY % 'create_service_provider', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_sp = policy.DeprecatedRule( name=base.IDENTITY % 'delete_service_provider', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) @@ -57,26 +57,27 @@ service_provider_policies = [ check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Create federated service provider.', - operations=[{'path': ('/v3/OS-FEDERATION/service_providers/' - '{service_provider_id}'), - 'method': 'PUT'}], - deprecated_rule=deprecated_create_sp), + operations=[ + { + 'path': ( + '/v3/OS-FEDERATION/service_providers/' + '{service_provider_id}' + ), + 'method': 'PUT', + } + ], + deprecated_rule=deprecated_create_sp, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_service_providers', 
check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List federated service providers.', operations=[ - { - 'path': '/v3/OS-FEDERATION/service_providers', - 'method': 'GET' - }, - { - 'path': '/v3/OS-FEDERATION/service_providers', - 'method': 'HEAD' - } + {'path': '/v3/OS-FEDERATION/service_providers', 'method': 'GET'}, + {'path': '/v3/OS-FEDERATION/service_providers', 'method': 'HEAD'}, ], - deprecated_rule=deprecated_list_sp + deprecated_rule=deprecated_list_sp, ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_service_provider', @@ -85,36 +86,54 @@ service_provider_policies = [ description='Get federated service provider.', operations=[ { - 'path': ('/v3/OS-FEDERATION/service_providers/' - '{service_provider_id}'), - 'method': 'GET' + 'path': ( + '/v3/OS-FEDERATION/service_providers/' + '{service_provider_id}' + ), + 'method': 'GET', }, { - 'path': ('/v3/OS-FEDERATION/service_providers/' - '{service_provider_id}'), - 'method': 'HEAD' - } + 'path': ( + '/v3/OS-FEDERATION/service_providers/' + '{service_provider_id}' + ), + 'method': 'HEAD', + }, ], - deprecated_rule=deprecated_get_sp + deprecated_rule=deprecated_get_sp, ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_service_provider', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Update federated service provider.', - operations=[{'path': ('/v3/OS-FEDERATION/service_providers/' - '{service_provider_id}'), - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_sp), + operations=[ + { + 'path': ( + '/v3/OS-FEDERATION/service_providers/' + '{service_provider_id}' + ), + 'method': 'PATCH', + } + ], + deprecated_rule=deprecated_update_sp, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_service_provider', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'project'], description='Delete federated service provider.', - operations=[{'path': ('/v3/OS-FEDERATION/service_providers/' - 
'{service_provider_id}'), - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_sp) + operations=[ + { + 'path': ( + '/v3/OS-FEDERATION/service_providers/' + '{service_provider_id}' + ), + 'method': 'DELETE', + } + ], + deprecated_rule=deprecated_delete_sp, + ), ] diff --git a/keystone/common/policies/token.py b/keystone/common/policies/token.py index 974c8d96f2..e8097e0305 100644 --- a/keystone/common/policies/token.py +++ b/keystone/common/policies/token.py @@ -23,19 +23,19 @@ deprecated_check_token = policy.DeprecatedRule( name=base.IDENTITY % 'check_token', check_str=base.RULE_ADMIN_OR_TOKEN_SUBJECT, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_validate_token = policy.DeprecatedRule( name=base.IDENTITY % 'validate_token', check_str=base.RULE_SERVICE_ADMIN_OR_TOKEN_SUBJECT, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_revoke_token = policy.DeprecatedRule( name=base.IDENTITY % 'revoke_token', check_str=base.RULE_ADMIN_OR_TOKEN_SUBJECT, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) ADMIN_OR_TOKEN_SUBJECT = ( @@ -58,25 +58,25 @@ token_policies = [ check_str=ADMIN_OR_SYSTEM_USER_OR_TOKEN_SUBJECT, scope_types=['system', 'domain', 'project'], description='Check a token.', - operations=[{'path': '/v3/auth/tokens', - 'method': 'HEAD'}], - deprecated_rule=deprecated_check_token), + operations=[{'path': '/v3/auth/tokens', 'method': 'HEAD'}], + deprecated_rule=deprecated_check_token, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'validate_token', check_str=ADMIN_OR_SYSTEM_USER_OR_SERVICE_OR_TOKEN_SUBJECT, scope_types=['system', 'domain', 'project'], description='Validate a token.', - operations=[{'path': '/v3/auth/tokens', - 'method': 'GET'}], - 
deprecated_rule=deprecated_validate_token), + operations=[{'path': '/v3/auth/tokens', 'method': 'GET'}], + deprecated_rule=deprecated_validate_token, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'revoke_token', check_str=ADMIN_OR_TOKEN_SUBJECT, scope_types=['system', 'domain', 'project'], description='Revoke a token.', - operations=[{'path': '/v3/auth/tokens', - 'method': 'DELETE'}], - deprecated_rule=deprecated_revoke_token) + operations=[{'path': '/v3/auth/tokens', 'method': 'DELETE'}], + deprecated_rule=deprecated_revoke_token, + ), ] diff --git a/keystone/common/policies/token_revocation.py b/keystone/common/policies/token_revocation.py index 28d17b08da..34d7120570 100644 --- a/keystone/common/policies/token_revocation.py +++ b/keystone/common/policies/token_revocation.py @@ -32,11 +32,12 @@ token_revocation_policies = [ # for consistency with other policies. scope_types=['system', 'project'], description='List revoked PKI tokens.', - operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked', - 'method': 'GET'}], + operations=[ + {'path': '/v3/auth/tokens/OS-PKI/revoked', 'method': 'GET'} + ], deprecated_for_removal=True, deprecated_since=versionutils.deprecated.TRAIN, - deprecated_reason=DEPRECATED_REASON + deprecated_reason=DEPRECATED_REASON, ) ] diff --git a/keystone/common/policies/trust.py b/keystone/common/policies/trust.py index 2ff2cef5ec..51c037b1f0 100644 --- a/keystone/common/policies/trust.py +++ b/keystone/common/policies/trust.py @@ -26,14 +26,29 @@ SYSTEM_ADMIN_OR_TRUSTOR = base.SYSTEM_ADMIN + ' or ' + RULE_TRUSTOR ADMIN_OR_TRUSTOR = base.RULE_ADMIN_REQUIRED + ' or ' + RULE_TRUSTOR ADMIN_OR_SYSTEM_READER_OR_TRUSTOR = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - '(' + SYSTEM_READER_OR_TRUSTOR + ')') + '(' + + base.RULE_ADMIN_REQUIRED + + ') or ' + + '(' + + SYSTEM_READER_OR_TRUSTOR + + ')' +) ADMIN_OR_SYSTEM_READER_OR_TRUSTEE = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - '(' + SYSTEM_READER_OR_TRUSTEE + ')') + '(' + + 
base.RULE_ADMIN_REQUIRED + + ') or ' + + '(' + + SYSTEM_READER_OR_TRUSTEE + + ')' +) ADMIN_OR_SYSTEM_READER_OR_TRUSTOR_OR_TRUSTEE = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - '(' + SYSTEM_READER_OR_TRUSTOR_OR_TRUSTEE + ')') + '(' + + base.RULE_ADMIN_REQUIRED + + ') or ' + + '(' + + SYSTEM_READER_OR_TRUSTOR_OR_TRUSTEE + + ')' +) DEPRECATED_REASON = ( "The trust API is now aware of system scope and default roles." @@ -43,31 +58,31 @@ deprecated_list_trusts = policy.DeprecatedRule( name=base.IDENTITY % 'list_trusts', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_list_roles_for_trust = policy.DeprecatedRule( name=base.IDENTITY % 'list_roles_for_trust', check_str=RULE_TRUSTOR + ' or ' + RULE_TRUSTEE, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_get_role_for_trust = policy.DeprecatedRule( name=base.IDENTITY % 'get_role_for_trust', check_str=RULE_TRUSTOR + ' or ' + RULE_TRUSTEE, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_delete_trust = policy.DeprecatedRule( name=base.IDENTITY % 'delete_trust', check_str=RULE_TRUSTOR, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) deprecated_get_trust = policy.DeprecatedRule( name=base.IDENTITY % 'get_trust', check_str=RULE_TRUSTOR + ' or ' + RULE_TRUSTEE, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.TRAIN + deprecated_since=versionutils.deprecated.TRAIN, ) trust_policies = [ @@ -79,78 +94,104 @@ trust_policies = [ # useful. For now, this should be a project only operation. 
scope_types=['project'], description='Create trust.', - operations=[{'path': '/v3/OS-TRUST/trusts', - 'method': 'POST'}]), + operations=[{'path': '/v3/OS-TRUST/trusts', 'method': 'POST'}], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_trusts', check_str=base.RULE_ADMIN_OR_SYSTEM_READER, scope_types=['system', 'project'], description='List trusts.', - operations=[{'path': '/v3/OS-TRUST/trusts', - 'method': 'GET'}, - {'path': '/v3/OS-TRUST/trusts', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_trusts), + operations=[ + {'path': '/v3/OS-TRUST/trusts', 'method': 'GET'}, + {'path': '/v3/OS-TRUST/trusts', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_trusts, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_trusts_for_trustor', check_str=ADMIN_OR_SYSTEM_READER_OR_TRUSTOR, scope_types=['system', 'project'], description='List trusts for trustor.', - operations=[{'path': '/v3/OS-TRUST/trusts?' - 'trustor_user_id={trustor_user_id}', - 'method': 'GET'}, - {'path': '/v3/OS-TRUST/trusts?' - 'trustor_user_id={trustor_user_id}', - 'method': 'HEAD'}]), + operations=[ + { + 'path': '/v3/OS-TRUST/trusts?' + 'trustor_user_id={trustor_user_id}', + 'method': 'GET', + }, + { + 'path': '/v3/OS-TRUST/trusts?' + 'trustor_user_id={trustor_user_id}', + 'method': 'HEAD', + }, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_trusts_for_trustee', check_str=ADMIN_OR_SYSTEM_READER_OR_TRUSTEE, scope_types=['system', 'project'], description='List trusts for trustee.', - operations=[{'path': '/v3/OS-TRUST/trusts?' - 'trustee_user_id={trustee_user_id}', - 'method': 'GET'}, - {'path': '/v3/OS-TRUST/trusts?' - 'trustee_user_id={trustee_user_id}', - 'method': 'HEAD'}]), + operations=[ + { + 'path': '/v3/OS-TRUST/trusts?' + 'trustee_user_id={trustee_user_id}', + 'method': 'GET', + }, + { + 'path': '/v3/OS-TRUST/trusts?' 
+ 'trustee_user_id={trustee_user_id}', + 'method': 'HEAD', + }, + ], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_roles_for_trust', check_str=ADMIN_OR_SYSTEM_READER_OR_TRUSTOR_OR_TRUSTEE, scope_types=['system', 'project'], description='List roles delegated by a trust.', - operations=[{'path': '/v3/OS-TRUST/trusts/{trust_id}/roles', - 'method': 'GET'}, - {'path': '/v3/OS-TRUST/trusts/{trust_id}/roles', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_roles_for_trust), + operations=[ + {'path': '/v3/OS-TRUST/trusts/{trust_id}/roles', 'method': 'GET'}, + {'path': '/v3/OS-TRUST/trusts/{trust_id}/roles', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_roles_for_trust, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_role_for_trust', check_str=ADMIN_OR_SYSTEM_READER_OR_TRUSTOR_OR_TRUSTEE, scope_types=['system', 'project'], description='Check if trust delegates a particular role.', - operations=[{'path': '/v3/OS-TRUST/trusts/{trust_id}/roles/{role_id}', - 'method': 'GET'}, - {'path': '/v3/OS-TRUST/trusts/{trust_id}/roles/{role_id}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_role_for_trust), + operations=[ + { + 'path': '/v3/OS-TRUST/trusts/{trust_id}/roles/{role_id}', + 'method': 'GET', + }, + { + 'path': '/v3/OS-TRUST/trusts/{trust_id}/roles/{role_id}', + 'method': 'HEAD', + }, + ], + deprecated_rule=deprecated_get_role_for_trust, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_trust', check_str=ADMIN_OR_TRUSTOR, scope_types=['system', 'project'], description='Revoke trust.', - operations=[{'path': '/v3/OS-TRUST/trusts/{trust_id}', - 'method': 'DELETE'}], - deprecated_rule=deprecated_delete_trust), + operations=[ + {'path': '/v3/OS-TRUST/trusts/{trust_id}', 'method': 'DELETE'} + ], + deprecated_rule=deprecated_delete_trust, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'get_trust', check_str=ADMIN_OR_SYSTEM_READER_OR_TRUSTOR_OR_TRUSTEE, scope_types=['system', 'project'], description='Get 
trust.', - operations=[{'path': '/v3/OS-TRUST/trusts/{trust_id}', - 'method': 'GET'}, - {'path': '/v3/OS-TRUST/trusts/{trust_id}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_trust) + operations=[ + {'path': '/v3/OS-TRUST/trusts/{trust_id}', 'method': 'GET'}, + {'path': '/v3/OS-TRUST/trusts/{trust_id}', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_get_trust, + ), ] diff --git a/keystone/common/policies/user.py b/keystone/common/policies/user.py index e738344961..f2f94139ed 100644 --- a/keystone/common/policies/user.py +++ b/keystone/common/policies/user.py @@ -21,17 +21,17 @@ SYSTEM_READER_OR_DOMAIN_READER_OR_USER = ( 'user_id:%(target.user.id)s' ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_OR_USER = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER_OR_USER - + '(' + + base.RULE_ADMIN_REQUIRED + + ') or ' + + SYSTEM_READER_OR_DOMAIN_READER_OR_USER ) SYSTEM_READER_OR_DOMAIN_READER = ( '(' + base.SYSTEM_READER + ') or (' + base.DOMAIN_READER + ')' ) ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER = ( - '(' + base.RULE_ADMIN_REQUIRED + ') or ' + - SYSTEM_READER_OR_DOMAIN_READER + '(' + base.RULE_ADMIN_REQUIRED + ') or ' + SYSTEM_READER_OR_DOMAIN_READER ) DEPRECATED_REASON = ( @@ -42,31 +42,31 @@ deprecated_get_user = policy.DeprecatedRule( name=base.IDENTITY % 'get_user', check_str=base.RULE_ADMIN_OR_OWNER, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_list_users = policy.DeprecatedRule( name=base.IDENTITY % 'list_users', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_create_user = policy.DeprecatedRule( name=base.IDENTITY % 'create_user', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + 
deprecated_since=versionutils.deprecated.STEIN, ) deprecated_update_user = policy.DeprecatedRule( name=base.IDENTITY % 'update_user', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) deprecated_delete_user = policy.DeprecatedRule( name=base.IDENTITY % 'delete_user', check_str=base.RULE_ADMIN_REQUIRED, deprecated_reason=DEPRECATED_REASON, - deprecated_since=versionutils.deprecated.STEIN + deprecated_since=versionutils.deprecated.STEIN, ) user_policies = [ @@ -75,21 +75,23 @@ user_policies = [ check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER_OR_USER, scope_types=['system', 'domain', 'project'], description='Show user details.', - operations=[{'path': '/v3/users/{user_id}', - 'method': 'GET'}, - {'path': '/v3/users/{user_id}', - 'method': 'HEAD'}], - deprecated_rule=deprecated_get_user), + operations=[ + {'path': '/v3/users/{user_id}', 'method': 'GET'}, + {'path': '/v3/users/{user_id}', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_get_user, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_users', check_str=ADMIN_OR_SYSTEM_READER_OR_DOMAIN_READER, scope_types=['system', 'domain', 'project'], description='List users.', - operations=[{'path': '/v3/users', - 'method': 'GET'}, - {'path': '/v3/users', - 'method': 'HEAD'}], - deprecated_rule=deprecated_list_users), + operations=[ + {'path': '/v3/users', 'method': 'GET'}, + {'path': '/v3/users', 'method': 'HEAD'}, + ], + deprecated_rule=deprecated_list_users, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_projects_for_user', check_str='', @@ -98,10 +100,11 @@ user_policies = [ # apply to scope_types or its purpose. So long as the user is in the # system and has a valid token, they should be able to generate a list # of projects they have access to. 
- description=('List all projects a user has access to via role ' - 'assignments.'), - operations=[{'path': ' /v3/auth/projects', - 'method': 'GET'}]), + description=( + 'List all projects a user has access to via role ' 'assignments.' + ), + operations=[{'path': ' /v3/auth/projects', 'method': 'GET'}], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'list_domains_for_user', check_str='', @@ -110,34 +113,35 @@ user_policies = [ # apply to scope_types or its purpose. So long as the user is in the # system and has a valid token, they should be able to generate a list # of domains they have access to. - description=('List all domains a user has access to via role ' - 'assignments.'), - operations=[{'path': '/v3/auth/domains', - 'method': 'GET'}]), + description=( + 'List all domains a user has access to via role ' 'assignments.' + ), + operations=[{'path': '/v3/auth/domains', 'method': 'GET'}], + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'create_user', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Create a user.', - operations=[{'path': '/v3/users', - 'method': 'POST'}], - deprecated_rule=deprecated_create_user), + operations=[{'path': '/v3/users', 'method': 'POST'}], + deprecated_rule=deprecated_create_user, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'update_user', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Update a user, including administrative password resets.', - operations=[{'path': '/v3/users/{user_id}', - 'method': 'PATCH'}], - deprecated_rule=deprecated_update_user), + operations=[{'path': '/v3/users/{user_id}', 'method': 'PATCH'}], + deprecated_rule=deprecated_update_user, + ), policy.DocumentedRuleDefault( name=base.IDENTITY % 'delete_user', check_str=base.RULE_ADMIN_REQUIRED, scope_types=['system', 'domain', 'project'], description='Delete a user.', - operations=[{'path': '/v3/users/{user_id}', - 'method': 'DELETE'}], - 
deprecated_rule=deprecated_delete_user) + operations=[{'path': '/v3/users/{user_id}', 'method': 'DELETE'}], + deprecated_rule=deprecated_delete_user, + ), ] diff --git a/keystone/common/profiler.py b/keystone/common/profiler.py index cf5fef32b2..e1232ee5db 100644 --- a/keystone/common/profiler.py +++ b/keystone/common/profiler.py @@ -30,17 +30,15 @@ def setup(name, host='0.0.0.0'): # nosec """ if CONF.profiler.enabled: osprofiler.initializer.init_from_conf( - conf=CONF, - context={}, - project="keystone", - service=name, - host=host + conf=CONF, context={}, project="keystone", service=name, host=host + ) + LOG.info( + "OSProfiler is enabled.\n" + "Traces provided from the profiler " + "can only be subscribed to using the same HMAC keys that " + "are configured in Keystone's configuration file " + "under the [profiler] section. \n To disable OSprofiler " + "set in /etc/keystone/keystone.conf:\n" + "[profiler]\n" + "enabled=false" ) - LOG.info("OSProfiler is enabled.\n" - "Traces provided from the profiler " - "can only be subscribed to using the same HMAC keys that " - "are configured in Keystone's configuration file " - "under the [profiler] section. \n To disable OSprofiler " - "set in /etc/keystone/keystone.conf:\n" - "[profiler]\n" - "enabled=false") diff --git a/keystone/common/provider_api.py b/keystone/common/provider_api.py index b9074f232c..6edebace60 100644 --- a/keystone/common/provider_api.py +++ b/keystone/common/provider_api.py @@ -29,20 +29,22 @@ class ProviderAPIRegistry(object): # Use "super" to bypass the __setattr__ preventing changes to the # object itself. 
super(ProviderAPIRegistry, self).__setattr__( - '__dict__', self.__shared_object_state) + '__dict__', self.__shared_object_state + ) def __getattr__(self, item): """Do attr lookup.""" try: return self.__registry[item] except KeyError: - raise AttributeError( - "'ProviderAPIs' has no attribute %s" % item) + raise AttributeError("'ProviderAPIs' has no attribute %s" % item) def __setattr__(self, key, value): """Do not allow setting values on the registry object.""" - raise RuntimeError('Programming Error: You may not set values on the ' - 'ProviderAPIRegistry objects.') + raise RuntimeError( + 'Programming Error: You may not set values on the ' + 'ProviderAPIRegistry objects.' + ) def _register_provider_api(self, name, obj): """Register an instance of a class as a provider api.""" @@ -53,13 +55,15 @@ class ProviderAPIRegistry(object): raise RuntimeError( 'Programming Error: The provider api registry has been ' 'locked (post configuration). Ensure all provider api ' - 'managers are instantiated before locking.') + 'managers are instantiated before locking.' 
+ ) if name in self.__registry: raise DuplicateProviderError( '`%(name)s` has already been registered as an api ' - 'provider by `%(prov)r`' % {'name': name, - 'prov': self.__registry[name]}) + 'provider by `%(prov)r`' + % {'name': name, 'prov': self.__registry[name]} + ) self.__registry[name] = obj def _clear_registry_instances(self): @@ -85,6 +89,7 @@ class ProviderAPIRegistry(object): :param method: the method on the api to return :type method: str """ + class DeferredProviderLookup(object): def __init__(self, api, method): self.__api = api diff --git a/keystone/common/rbac_enforcer/enforcer.py b/keystone/common/rbac_enforcer/enforcer.py index 7add048ce8..d00138188b 100644 --- a/keystone/common/rbac_enforcer/enforcer.py +++ b/keystone/common/rbac_enforcer/enforcer.py @@ -33,10 +33,13 @@ LOG = log.getLogger(__name__) PROVIDER_APIS = provider_api.ProviderAPIs -_POSSIBLE_TARGET_ACTIONS = frozenset([ - rule.name for - rule in policies.list_rules() if not rule.deprecated_for_removal -]) +_POSSIBLE_TARGET_ACTIONS = frozenset( + [ + rule.name + for rule in policies.list_rules() + if not rule.deprecated_for_removal + ] +) _ENFORCEMENT_CHECK_ATTR = 'keystone:RBAC:enforcement_called' @@ -64,35 +67,45 @@ class RBACEnforcer(object): def _check_deprecated_rule(self, action): def _name_is_changing(rule): deprecated_rule = rule.deprecated_rule - return (deprecated_rule and - deprecated_rule.name != rule.name and - deprecated_rule.name in self._enforcer.file_rules) + return ( + deprecated_rule + and deprecated_rule.name != rule.name + and deprecated_rule.name in self._enforcer.file_rules + ) def _check_str_is_changing(rule): deprecated_rule = rule.deprecated_rule - return (deprecated_rule and - deprecated_rule.check_str != rule.check_str and - rule.name not in self._enforcer.file_rules) + return ( + deprecated_rule + and deprecated_rule.check_str != rule.check_str + and rule.name not in self._enforcer.file_rules + ) def _is_deprecated_for_removal(rule): - return 
(rule.deprecated_for_removal and - rule.name in self._enforcer.file_rules) + return ( + rule.deprecated_for_removal + and rule.name in self._enforcer.file_rules + ) def _emit_warning(): if not self._enforcer._warning_emitted: - LOG.warning("Deprecated policy rules found. Use " - "oslopolicy-policy-generator and " - "oslopolicy-policy-upgrade to detect and resolve " - "deprecated policies in your configuration.") + LOG.warning( + "Deprecated policy rules found. Use " + "oslopolicy-policy-generator and " + "oslopolicy-policy-upgrade to detect and resolve " + "deprecated policies in your configuration." + ) self._enforcer._warning_emitted = True registered_rule = self._enforcer.registered_rules.get(action) if not registered_rule: return - if (_name_is_changing(registered_rule) or - _check_str_is_changing(registered_rule) or - _is_deprecated_for_removal(registered_rule)): + if ( + _name_is_changing(registered_rule) + or _check_str_is_changing(registered_rule) + or _is_deprecated_for_removal(registered_rule) + ): _emit_warning() def _enforce(self, credentials, action, target, do_raise=True): @@ -118,12 +131,14 @@ class RBACEnforcer(object): # Add the exception arguments if asked to do a raise extra = {} if do_raise: - extra.update(exc=exception.ForbiddenAction, action=action, - do_raise=do_raise) + extra.update( + exc=exception.ForbiddenAction, action=action, do_raise=do_raise + ) try: result = self._enforcer.enforce( - rule=action, target=target, creds=credentials, **extra) + rule=action, target=target, creds=credentials, **extra + ) self._check_deprecated_rule(action) return result except common_policy.InvalidScope: @@ -160,13 +175,17 @@ class RBACEnforcer(object): def _extract_filter_values(filters): """Extract filter data from query params for RBAC enforcement.""" filters = filters or [] - target = {i: flask.request.args[i] for - i in filters if i in flask.request.args} + target = { + i: flask.request.args[i] + for i in filters + if i in flask.request.args + } if 
target: if LOG.logger.getEffectiveLevel() <= log.DEBUG: LOG.debug( 'RBAC: Adding query filter params (%s)', - ', '.join(['%s=%s' % (k, v) for k, v in target.items()])) + ', '.join(['%s=%s' % (k, v) for k, v in target.items()]), + ) return target @staticmethod @@ -181,14 +200,19 @@ class RBACEnforcer(object): :rtype: dict """ ret_dict = {} - if ((member_target is not None and member_target_type is None) or - (member_target is None and member_target_type is not None)): - LOG.warning('RBAC: Unknown target type or target reference. ' - 'Rejecting as unauthorized. ' - '(member_target_type=%(target_type)r, ' - 'member_target=%(target_ref)r)', - {'target_type': member_target_type, - 'target_ref': member_target}) + if (member_target is not None and member_target_type is None) or ( + member_target is None and member_target_type is not None + ): + LOG.warning( + 'RBAC: Unknown target type or target reference. ' + 'Rejecting as unauthorized. ' + '(member_target_type=%(target_type)r, ' + 'member_target=%(target_ref)r)', + { + 'target_type': member_target_type, + 'target_ref': member_target, + }, + ) # Fast exit. return ret_dict @@ -206,7 +230,8 @@ class RBACEnforcer(object): # should be more protection against something wonky # here. resource = flask.current_app.view_functions[ - flask.request.endpoint].view_class + flask.request.endpoint + ].view_class try: member_name = getattr(resource, 'member_key', None) except ValueError: @@ -216,8 +241,7 @@ class RBACEnforcer(object): # normal and acceptable. Set member_name to None as though # it wasn't set. 
member_name = None - func = getattr( - resource, 'get_member_from_driver', None) + func = getattr(resource, 'get_member_from_driver', None) if member_name is not None and callable(func): key = '%s_id' % member_name if key in (flask.request.view_args or {}): @@ -251,17 +275,18 @@ class RBACEnforcer(object): target = 'token' subject_token = flask.request.headers.get('X-Subject-Token') access_rules_support = flask.request.headers.get( - authorization.ACCESS_RULES_HEADER) + authorization.ACCESS_RULES_HEADER + ) if subject_token is not None: - allow_expired = (strutils.bool_from_string( - flask.request.args.get('allow_expired', False), - default=False)) + allow_expired = strutils.bool_from_string( + flask.request.args.get('allow_expired', False), default=False + ) if allow_expired: window_seconds = CONF.token.allow_expired_window token = PROVIDER_APIS.token_provider_api.validate_token( subject_token, window_seconds=window_seconds, - access_rules_support=access_rules_support + access_rules_support=access_rules_support, ) # TODO(morgan): Expand extracted data from the subject token. ret_dict[target] = {} @@ -284,15 +309,21 @@ class RBACEnforcer(object): def _assert_is_authenticated(cls): ctx = cls._get_oslo_req_context() if ctx is None: - LOG.warning('RBAC: Error reading the request context generated by ' - 'the Auth Middleware (there is no context). Rejecting ' - 'request as unauthorized.') + LOG.warning( + 'RBAC: Error reading the request context generated by ' + 'the Auth Middleware (there is no context). Rejecting ' + 'request as unauthorized.' + ) raise exception.Unauthorized( - _('Internal error processing authentication and ' - 'authorization.')) + _( + 'Internal error processing authentication and ' + 'authorization.' 
+ ) + ) if not ctx.authenticated: raise exception.Unauthorized( - _('auth_context did not decode anything useful')) + _('auth_context did not decode anything useful') + ) @classmethod def _shared_admin_auth_token_set(cls): @@ -300,9 +331,16 @@ class RBACEnforcer(object): return getattr(ctx, 'is_admin', False) @classmethod - def enforce_call(cls, enforcer=None, action=None, target_attr=None, - member_target_type=None, member_target=None, - filters=None, build_target=None): + def enforce_call( + cls, + enforcer=None, + action=None, + target_attr=None, + member_target_type=None, + member_target=None, + filters=None, + build_target=None, + ): """Enforce RBAC on the current request. This will do some legwork and then instantiate the Enforcer if an @@ -354,11 +392,13 @@ class RBACEnforcer(object): # @policy_enforcer_action decorator was used. action = action or getattr(flask.g, cls.ACTION_STORE_ATTR, None) if action not in _POSSIBLE_TARGET_ACTIONS: - LOG.warning('RBAC: Unknown enforcement action name `%s`. ' - 'Rejecting as Forbidden, this is a programming error ' - 'and a bug should be filed with as much information ' - 'about the request that caused this as possible.', - action) + LOG.warning( + 'RBAC: Unknown enforcement action name `%s`. ' + 'Rejecting as Forbidden, this is a programming error ' + 'and a bug should be filed with as much information ' + 'about the request that caused this as possible.', + action, + ) # NOTE(morgan): While this is an internal error, a 500 is never # desirable, we have handled the case and the most appropriate # response here is to issue a 403 (FORBIDDEN) to any API calling @@ -368,7 +408,9 @@ class RBACEnforcer(object): raise exception.Forbidden( message=_( 'Internal RBAC enforcement error, invalid rule (action) ' - 'name.')) + 'name.' + ) + ) # Mark flask.g as "enforce_call" has been called. 
This should occur # before anything except the "is this a valid action" check, ensuring @@ -399,22 +441,29 @@ class RBACEnforcer(object): # Get the Target Data Set. if target_attr is None and build_target is None: try: - policy_dict.update(cls._extract_member_target_data( - member_target_type, member_target)) + policy_dict.update( + cls._extract_member_target_data( + member_target_type, member_target + ) + ) except exception.NotFound: # DEBUG LOG and bubble up the 404 error. This is expected # behavior. This likely should be specific in each API. This # should be revisited in the future and each API should make # the explicit "existence" checks before enforcement. - LOG.debug('Extracting inferred target data resulted in ' - '"NOT FOUND (404)".') + LOG.debug( + 'Extracting inferred target data resulted in ' + '"NOT FOUND (404)".' + ) raise except Exception as e: # nosec # NOTE(morgan): Errors should never bubble up at this point, # if there is an error getting the target, log it and move # on. Raise an explicit 403, we have failed policy checks. 
- LOG.warning('Unable to extract inferred target data during ' - 'enforcement') + LOG.warning( + 'Unable to extract inferred target data during ' + 'enforcement' + ) LOG.debug(e, exc_info=True) raise exception.ForbiddenAction(action=action) @@ -422,11 +471,14 @@ class RBACEnforcer(object): subj_token_target_data = cls._extract_subject_token_target_data() if subj_token_target_data: policy_dict.setdefault('target', {}).update( - subj_token_target_data) + subj_token_target_data + ) else: if target_attr and build_target: - raise ValueError('Programming Error: A target_attr or ' - 'build_target must be provided, but not both') + raise ValueError( + 'Programming Error: A target_attr or ' + 'build_target must be provided, but not both' + ) policy_dict['target'] = target_attr or build_target() @@ -443,25 +495,33 @@ class RBACEnforcer(object): if LOG.logger.getEffectiveLevel() <= log.DEBUG: # LOG the Args args_str = ', '.join( - ['%s=%s' % (k, v) for - k, v in (flask.request.view_args or {}).items()]) + [ + '%s=%s' % (k, v) + for k, v in (flask.request.view_args or {}).items() + ] + ) args_str = strutils.mask_password(args_str) - LOG.debug('RBAC: Authorizing `%(action)s(%(args)s)`', - {'action': action, 'args': args_str}) + LOG.debug( + 'RBAC: Authorizing `%(action)s(%(args)s)`', + {'action': action, 'args': args_str}, + ) ctxt = cls._get_oslo_req_context() # Instantiate the enforcer object if needed. enforcer_obj = enforcer or cls() enforcer_obj._enforce( - credentials=ctxt, action=action, target=flattened) + credentials=ctxt, action=action, target=flattened + ) LOG.debug('RBAC: Authorization granted') @classmethod def policy_enforcer_action(cls, action): """Decorator to set policy enforcement action name.""" if action not in _POSSIBLE_TARGET_ACTIONS: - raise ValueError('PROGRAMMING ERROR: Action must reference a ' - 'valid Keystone policy enforcement name.') + raise ValueError( + 'PROGRAMMING ERROR: Action must reference a ' + 'valid Keystone policy enforcement name.' 
+ ) def wrapper(f): @functools.wraps(f) @@ -470,7 +530,9 @@ class RBACEnforcer(object): # later. setattr(flask.g, cls.ACTION_STORE_ATTR, action) return f(*args, **kwargs) + return inner + return wrapper @staticmethod diff --git a/keystone/common/render_token.py b/keystone/common/render_token.py index fec7bec494..0526e8d874 100644 --- a/keystone/common/render_token.py +++ b/keystone/common/render_token.py @@ -25,13 +25,11 @@ def render_token_response_from_model(token, include_catalog=True): 'user': { 'domain': { 'id': token.user_domain['id'], - 'name': token.user_domain['name'] + 'name': token.user_domain['name'], }, 'id': token.user_id, 'name': token.user['name'], - 'password_expires_at': token.user[ - 'password_expires_at' - ] + 'password_expires_at': token.user['password_expires_at'], }, 'audit_ids': token.audit_ids, 'expires_at': token.expires_at, @@ -44,7 +42,7 @@ def render_token_response_from_model(token, include_catalog=True): elif token.domain_scoped: token_reference['token']['domain'] = { 'id': token.domain['id'], - 'name': token.domain['name'] + 'name': token.domain['name'], } token_reference['token']['roles'] = token.roles elif token.trust_scoped: @@ -52,15 +50,15 @@ def render_token_response_from_model(token, include_catalog=True): 'id': token.trust_id, 'trustor_user': {'id': token.trustor['id']}, 'trustee_user': {'id': token.trustee['id']}, - 'impersonation': token.trust['impersonation'] + 'impersonation': token.trust['impersonation'], } token_reference['token']['project'] = { 'domain': { 'id': token.project_domain['id'], - 'name': token.project_domain['name'] + 'name': token.project_domain['name'], }, 'id': token.trust_project['id'], - 'name': token.trust_project['name'] + 'name': token.trust_project['name'], } if token.trust.get('impersonation'): trustor_domain = PROVIDERS.resource_api.get_domain( @@ -69,23 +67,21 @@ def render_token_response_from_model(token, include_catalog=True): token_reference['token']['user'] = { 'domain': { 'id': 
trustor_domain['id'], - 'name': trustor_domain['name'] + 'name': trustor_domain['name'], }, 'id': token.trustor['id'], 'name': token.trustor['name'], - 'password_expires_at': token.trustor[ - 'password_expires_at' - ] + 'password_expires_at': token.trustor['password_expires_at'], } token_reference['token']['roles'] = token.roles elif token.project_scoped: token_reference['token']['project'] = { 'domain': { 'id': token.project_domain['id'], - 'name': token.project_domain['name'] + 'name': token.project_domain['name'], }, 'id': token.project['id'], - 'name': token.project['name'] + 'name': token.project['name'], } token_reference['token']['is_domain'] = token.project.get( 'is_domain', False @@ -95,8 +91,8 @@ def render_token_response_from_model(token, include_catalog=True): ap_domain_name = CONF.resource.admin_project_domain_name if ap_name and ap_domain_name: is_ap = ( - token.project['name'] == ap_name and - ap_domain_name == token.project_domain['name'] + token.project['name'] == ap_name + and ap_domain_name == token.project_domain['name'] ) token_reference['token']['is_admin_project'] = is_ap if include_catalog and not token.unscoped: @@ -116,26 +112,23 @@ def render_token_response_from_model(token, include_catalog=True): groups=token.federated_groups, identity_provider={'id': token.identity_provider_id}, protocol={'id': token.protocol_id}, - - ) - token_reference['token']['user']['OS-FEDERATION'] = ( - federated_dict ) + token_reference['token']['user']['OS-FEDERATION'] = federated_dict del token_reference['token']['user']['password_expires_at'] if token.access_token_id: token_reference['token']['OS-OAUTH1'] = { 'access_token_id': token.access_token_id, - 'consumer_id': token.access_token['consumer_id'] + 'consumer_id': token.access_token['consumer_id'], } if token.application_credential_id: key = 'application_credential' token_reference['token'][key] = {} - token_reference['token'][key]['id'] = ( - token.application_credential['id'] - ) - 
token_reference['token'][key]['name'] = ( - token.application_credential['name'] - ) + token_reference['token'][key]['id'] = token.application_credential[ + 'id' + ] + token_reference['token'][key]['name'] = token.application_credential[ + 'name' + ] restricted = not token.application_credential['unrestricted'] token_reference['token'][key]['restricted'] = restricted if token.application_credential.get('access_rules'): diff --git a/keystone/common/resource_options/core.py b/keystone/common/resource_options/core.py index 68fefe1896..79a630a462 100644 --- a/keystone/common/resource_options/core.py +++ b/keystone/common/resource_options/core.py @@ -39,7 +39,8 @@ def ref_mapper_to_dict_options(ref): for opt in ref._resource_option_mapper.values(): if opt.option_id in ref.resource_options_registry.option_ids: r_opt = ref.resource_options_registry.get_option_by_id( - opt.option_id) + opt.option_id + ) if r_opt is not None: options[r_opt.option_name] = opt.option_value return options @@ -86,7 +87,8 @@ def resource_options_ref_to_mapper(ref, option_class): # Get any options that are not registered and slate them for removal from # the DB. This will delete unregistered options. clear_options = set_options.difference( - ref.resource_options_registry.option_ids) + ref.resource_options_registry.option_ids + ) options.update({x: None for x in clear_options}) # Set the resource options for user in the Attribute Mapping. @@ -98,8 +100,8 @@ def resource_options_ref_to_mapper(ref, option_class): else: # Set any options on the user_ref itself. 
opt_obj = option_class( - option_id=r_opt_id, - option_value=r_opt_value) + option_id=r_opt_id, option_value=r_opt_value + ) ref._resource_option_mapper[r_opt_id] = opt_obj @@ -114,8 +116,9 @@ class ResourceOptionRegistry(object): @property def options_by_name(self): - return {opt.option_name: opt - for opt in self._registered_options.values()} + return { + opt.option_name: opt for opt in self._registered_options.values() + } @property def options(self): @@ -136,15 +139,18 @@ class ResourceOptionRegistry(object): @property def json_schema(self): - schema = {'type': 'object', - 'properties': {}, - 'additionalProperties': False} + schema = { + 'type': 'object', + 'properties': {}, + 'additionalProperties': False, + } for opt in self.options: if opt.json_schema is not None: # NOTE(notmorgan): All options are nullable. Null indicates # the option should be reset and removed from the DB store. schema['properties'][opt.option_name] = validation.nullable( - opt.json_schema) + opt.json_schema + ) else: # NOTE(notmorgan): without 'type' being specified, this # can be of any-type. 
We are simply specifying no interesting @@ -158,22 +164,33 @@ class ResourceOptionRegistry(object): return if option.option_id in self._registered_options: - raise ValueError(_('Option %(option_id)s already defined in ' - '%(registry)s.') % - {'option_id': option.option_id, - 'registry': self._registry_type}) + raise ValueError( + _('Option %(option_id)s already defined in ' '%(registry)s.') + % { + 'option_id': option.option_id, + 'registry': self._registry_type, + } + ) if option.option_name in self.option_names: - raise ValueError(_('Option %(option_name)s already defined in ' - '%(registry)s') % - {'option_name': option.option_name, - 'registry': self._registry_type}) + raise ValueError( + _('Option %(option_name)s already defined in ' '%(registry)s') + % { + 'option_name': option.option_name, + 'registry': self._registry_type, + } + ) self._registered_options[option.option_id] = option class ResourceOption(object): - def __init__(self, option_id, option_name, validator=_validator, - json_schema_validation=None): + def __init__( + self, + option_id, + option_name, + validator=_validator, + json_schema_validation=None, + ): """The base object to define the option(s) to be stored in the DB. :param option_id: The ID of the option. This will be used to lookup @@ -201,15 +218,18 @@ class ResourceOption(object): :type json_schema_validation: dict """ if not isinstance(option_id, str) and len(option_id) == 4: - raise TypeError(_('`option_id` must be a string, got %r') - % option_id) + raise TypeError( + _('`option_id` must be a string, got %r') % option_id + ) elif len(option_id) != 4: - raise ValueError(_('`option_id` must be 4 characters in ' - - 'length. Got %r') % option_id) + raise ValueError( + _('`option_id` must be 4 characters in ' 'length. Got %r') + % option_id + ) if not isinstance(option_name, str): - raise TypeError(_('`option_name` must be a string. ' - 'Got %r') % option_name) + raise TypeError( + _('`option_name` must be a string. 
' 'Got %r') % option_name + ) self._option_id = option_id self._option_name = option_name diff --git a/keystone/common/resource_options/options/immutable.py b/keystone/common/resource_options/options/immutable.py index f8fd02655b..8f0c44f209 100644 --- a/keystone/common/resource_options/options/immutable.py +++ b/keystone/common/resource_options/options/immutable.py @@ -15,13 +15,12 @@ from keystone.common.resource_options import core as ro_core from keystone.common.validation import parameter_types from keystone import exception -IMMUTABLE_OPT = ( - ro_core.ResourceOption( - option_id='IMMU', - option_name='immutable', - validator=ro_core.boolean_validator, - json_schema_validation=parameter_types.boolean - )) +IMMUTABLE_OPT = ro_core.ResourceOption( + option_id='IMMU', + option_name='immutable', + validator=ro_core.boolean_validator, + json_schema_validation=parameter_types.boolean, +) def check_resource_immutable(resource_ref): @@ -30,11 +29,13 @@ def check_resource_immutable(resource_ref): :param resource_ref: a dict reference of a resource to inspect """ return resource_ref.get('options', {}).get( - IMMUTABLE_OPT.option_name, False) + IMMUTABLE_OPT.option_name, False + ) -def check_immutable_update(original_resource_ref, new_resource_ref, type, - resource_id): +def check_immutable_update( + original_resource_ref, new_resource_ref, type, resource_id +): """Check if an update is allowed to an immutable resource. 
Valid cases where an update is allowed: @@ -53,11 +54,14 @@ def check_immutable_update(original_resource_ref, new_resource_ref, type, immutable = check_resource_immutable(original_resource_ref) if immutable: new_options = new_resource_ref.get('options', {}) - if ((len(new_resource_ref.keys()) > 1) or - (IMMUTABLE_OPT.option_name not in new_options) or - (new_options[IMMUTABLE_OPT.option_name] not in (False, None))): + if ( + (len(new_resource_ref.keys()) > 1) + or (IMMUTABLE_OPT.option_name not in new_options) + or (new_options[IMMUTABLE_OPT.option_name] not in (False, None)) + ): raise exception.ResourceUpdateForbidden( - type=type, resource_id=resource_id) + type=type, resource_id=resource_id + ) def check_immutable_delete(resource_ref, resource_type, resource_id): @@ -70,4 +74,5 @@ def check_immutable_delete(resource_ref, resource_type, resource_id): """ if check_resource_immutable(resource_ref): raise exception.ResourceDeleteForbidden( - type=resource_type, resource_id=resource_id) + type=resource_type, resource_id=resource_id + ) diff --git a/keystone/common/sql/core.py b/keystone/common/sql/core.py index fdeb15136c..1101e5b300 100644 --- a/keystone/common/sql/core.py +++ b/keystone/common/sql/core.py @@ -74,9 +74,7 @@ Unicode = sql.Unicode def initialize(): """Initialize the module.""" - db_options.set_defaults( - CONF, - connection="sqlite:///keystone.db") + db_options.set_defaults(CONF, connection="sqlite:///keystone.db") # Configure OSprofiler options profiler.set_defaults(CONF, enabled=False, trace_sqlalchemy=False) @@ -93,6 +91,7 @@ def initialize_decorator(init): definition. 
""" + def initialize(self, *args, **kwargs): cls = type(self) for k, v in kwargs.items(): @@ -105,9 +104,11 @@ def initialize_decorator(init): v = str(v) if column.type.length and column.type.length < len(v): raise exception.StringLengthExceeded( - string=v, type=k, length=column.type.length) + string=v, type=k, length=column.type.length + ) init(self, *args, **kwargs) + return initialize @@ -159,8 +160,12 @@ class DateTimeInt(sql_types.TypeDecorator): return value else: if not isinstance(value, datetime.datetime): - raise ValueError(_('Programming Error: value to be stored ' - 'must be a datetime object.')) + raise ValueError( + _( + 'Programming Error: value to be stored ' + 'must be a datetime object.' + ) + ) value = timeutils.normalize_time(value) value = value.replace(tzinfo=datetime.timezone.utc) # NOTE(morgan): We are casting this to an int, and ensuring we @@ -179,8 +184,9 @@ class DateTimeInt(sql_types.TypeDecorator): # NOTE(morgan): Explictly use timezone "datetime.timezone.utc" to # ensure we are not adjusting the actual datetime object from what # we stored. - dt_obj = datetime.datetime.fromtimestamp(value, - tz=datetime.timezone.utc) + dt_obj = datetime.datetime.fromtimestamp( + value, tz=datetime.timezone.utc + ) # Return non-tz aware datetime object (as keystone expects) return timeutils.normalize_time(dt_obj) @@ -195,11 +201,13 @@ class ModelDictMixinWithExtras(models.ModelBase): """ attributes = [] - _msg = ('Programming Error: Model does not have an "extra" column. ' - 'Unless the model already has an "extra" column and has ' - 'existed in a previous released version of keystone with ' - 'the extra column included, the model should use ' - '"ModelDictMixin" instead.') + _msg = ( + 'Programming Error: Model does not have an "extra" column. ' + 'Unless the model already has an "extra" column and has ' + 'existed in a previous released version of keystone with ' + 'the extra column included, the model should use ' + '"ModelDictMixin" instead.' 
+ ) @classmethod def from_dict(cls, d): @@ -210,8 +218,11 @@ class ModelDictMixinWithExtras(models.ModelBase): # programmers NOT end users. raise AttributeError(cls._msg) # no qa - new_d['extra'] = {k: new_d.pop(k) for k in d.keys() - if k not in cls.attributes and k != 'extra'} + new_d['extra'] = { + k: new_d.pop(k) + for k in d.keys() + if k not in cls.attributes and k != 'extra' + } return cls(**new_d) @@ -292,6 +303,7 @@ def _get_context(): # NOTE(dims): Delay the `threading.local` import to allow for # eventlet/gevent monkeypatching to happen import threading + _CONTEXT = threading.local() return _CONTEXT @@ -375,6 +387,7 @@ def _filter(model, query, hints): :returns: query updated with any filters satisfied """ + def inexact_filter(model, query, filter_, satisfied_filters): """Apply an inexact filter to a query. @@ -443,11 +456,11 @@ def _filter(model, query, hints): if filter_['name'] not in model.attributes: continue if filter_['comparator'] == 'equals': - query = exact_filter(model, query, filter_, - satisfied_filters) + query = exact_filter(model, query, filter_, satisfied_filters) else: - query = inexact_filter(model, query, filter_, - satisfied_filters) + query = inexact_filter( + model, query, filter_, satisfied_filters + ) # Remove satisfied filters, then the caller will know remaining filters for filter_ in satisfied_filters: @@ -531,8 +544,10 @@ def handle_conflicts(conflict_type='object'): # LOG the exception for debug purposes, do not send the # exception details out with the raised Conflict exception # as it can contain raw SQL. 
- LOG.debug(_conflict_msg, {'conflict_type': conflict_type, - 'details': e}) + LOG.debug( + _conflict_msg, + {'conflict_type': conflict_type, 'details': e}, + ) name = None field = None domain_id = None @@ -553,16 +568,17 @@ def handle_conflicts(conflict_type='object'): domain_id = arg['domain_id'] msg = _('Duplicate entry') if name and domain_id: - msg = _('Duplicate entry found with %(field)s %(name)s ' - 'at domain ID %(domain_id)s') % { - 'field': field, 'name': name, 'domain_id': domain_id} + msg = _( + 'Duplicate entry found with %(field)s %(name)s ' + 'at domain ID %(domain_id)s' + ) % {'field': field, 'name': name, 'domain_id': domain_id} elif name: - msg = _('Duplicate entry found with %(field)s ' - '%(name)s') % {'field': field, 'name': name} + msg = _( + 'Duplicate entry found with %(field)s ' '%(name)s' + ) % {'field': field, 'name': name} elif domain_id: - msg = (_('Duplicate entry at domain ID %s') % domain_id) - raise exception.Conflict(type=conflict_type, - details=msg) + msg = _('Duplicate entry at domain ID %s') % domain_id + raise exception.Conflict(type=conflict_type, details=msg) except db_exception.DBError as e: # TODO(blk-u): inspecting inner_exception breaks encapsulation; # oslo_db should provide exception we need. @@ -570,17 +586,24 @@ def handle_conflicts(conflict_type='object'): # LOG the exception for debug purposes, do not send the # exception details out with the raised Conflict exception # as it can contain raw SQL. - LOG.debug(_conflict_msg, {'conflict_type': conflict_type, - 'details': e}) + LOG.debug( + _conflict_msg, + {'conflict_type': conflict_type, 'details': e}, + ) # NOTE(morganfainberg): This is really a case where the SQL # failed to store the data. This is not something that the # user has done wrong. Example would be a ForeignKey is # missing; the code that is executed before reaching the # SQL writing to the DB should catch the issue. 
raise exception.UnexpectedError( - _('An unexpected error occurred when trying to ' - 'store %s') % conflict_type) + _( + 'An unexpected error occurred when trying to ' + 'store %s' + ) + % conflict_type + ) raise return wrapper + return decorator diff --git a/keystone/common/sql/migrations/env.py b/keystone/common/sql/migrations/env.py index 2cc6a619ce..333d5aa5bc 100644 --- a/keystone/common/sql/migrations/env.py +++ b/keystone/common/sql/migrations/env.py @@ -33,11 +33,9 @@ target_metadata = core.ModelBase.metadata def include_object(object, name, type_, reflected, compare_to): - BORKED_COLUMNS = ( - ) + BORKED_COLUMNS = () - BORKED_UNIQUE_CONSTRAINTS = ( - ) + BORKED_UNIQUE_CONSTRAINTS = () BORKED_FK_CONSTRAINTS = ( # removed fks diff --git a/keystone/common/sql/migrations/versions/2024.01/expand/47147121_add_identity_federation_attribute_mapping_schema_version.py b/keystone/common/sql/migrations/versions/2024.01/expand/47147121_add_identity_federation_attribute_mapping_schema_version.py index 9e35cc3de2..8c060e0d97 100644 --- a/keystone/common/sql/migrations/versions/2024.01/expand/47147121_add_identity_federation_attribute_mapping_schema_version.py +++ b/keystone/common/sql/migrations/versions/2024.01/expand/47147121_add_identity_federation_attribute_mapping_schema_version.py @@ -30,6 +30,9 @@ depends_on = None def upgrade(): - op.add_column("mapping", Column('schema_version', - String(5), nullable=False, - server_default="1.0")) + op.add_column( + "mapping", + Column( + 'schema_version', String(5), nullable=False, server_default="1.0" + ), + ) diff --git a/keystone/common/sql/migrations/versions/27e647c0fad4_initial_version.py b/keystone/common/sql/migrations/versions/27e647c0fad4_initial_version.py index 084e25a399..5d1c228d55 100644 --- a/keystone/common/sql/migrations/versions/27e647c0fad4_initial_version.py +++ b/keystone/common/sql/migrations/versions/27e647c0fad4_initial_version.py @@ -987,7 +987,8 @@ def upgrade(): 'Credential migration in progress. 
Cannot perform ' 'writes to credential table.' ) - credential_update_trigger = textwrap.dedent(f""" + credential_update_trigger = textwrap.dedent( + f""" CREATE OR REPLACE FUNCTION keystone_read_only_update() RETURNS trigger AS $BODY$ @@ -1001,7 +1002,8 @@ def upgrade(): RETURN NEW; END $BODY$ LANGUAGE plpgsql; - """) + """ + ) op.execute(credential_update_trigger) error_message = ( @@ -1009,7 +1011,8 @@ def upgrade(): 'insert new rows into the identity_provider table at ' 'this time.' ) - identity_provider_insert_trigger = textwrap.dedent(f""" + identity_provider_insert_trigger = textwrap.dedent( + f""" CREATE OR REPLACE FUNCTION keystone_read_only_insert() RETURNS trigger AS $BODY$ @@ -1017,10 +1020,12 @@ def upgrade(): RAISE EXCEPTION '{error_message}'; END $BODY$ LANGUAGE plpgsql; - """) + """ + ) op.execute(identity_provider_insert_trigger) - federated_user_insert_trigger = textwrap.dedent(""" + federated_user_insert_trigger = textwrap.dedent( + """ CREATE OR REPLACE FUNCTION update_federated_user_domain_id() RETURNS trigger AS $BODY$ @@ -1031,10 +1036,12 @@ def upgrade(): RETURN NULL; END $BODY$ LANGUAGE plpgsql; - """) + """ + ) op.execute(federated_user_insert_trigger) - local_user_insert_trigger = textwrap.dedent(""" + local_user_insert_trigger = textwrap.dedent( + """ CREATE OR REPLACE FUNCTION update_user_domain_id() RETURNS trigger AS $BODY$ @@ -1044,7 +1051,8 @@ def upgrade(): RETURN NULL; END $BODY$ LANGUAGE plpgsql; - """) + """ + ) op.execute(local_user_insert_trigger) # FIXME(stephenfin): Remove these indexes. 
They're left over from attempts diff --git a/keystone/common/sql/upgrades.py b/keystone/common/sql/upgrades.py index 3fb699fd97..409611926d 100644 --- a/keystone/common/sql/upgrades.py +++ b/keystone/common/sql/upgrades.py @@ -77,7 +77,8 @@ def _find_alembic_conf(): :returns: An instance of ``alembic.config.Config`` """ path = os.path.join( - os.path.abspath(os.path.dirname(__file__)), 'alembic.ini', + os.path.abspath(os.path.dirname(__file__)), + 'alembic.ini', ) config = alembic_config.Config(os.path.abspath(path)) @@ -138,8 +139,9 @@ def get_current_heads(): # design) uses *python* interpolation to write the string out ... where # "%" is the special python interpolation character! Avoid this # mismatch by quoting all %'s for the set below. - engine_url = engine.url.render_as_string( - hide_password=False).replace('%', '%%') + engine_url = engine.url.render_as_string(hide_password=False).replace( + '%', '%%' + ) config.set_main_option('sqlalchemy.url', engine_url) heads = _get_current_heads(engine, config) @@ -180,8 +182,9 @@ def get_db_version(branch=EXPAND_BRANCH, *, engine=None): # design) uses *python* interpolation to write the string out ... where # "%" is the special python interpolation character! Avoid this # mismatch by quoting all %'s for the set below. - engine_url = engine.url.render_as_string( - hide_password=False).replace('%', '%%') + engine_url = engine.url.render_as_string(hide_password=False).replace( + '%', '%%' + ) config.set_main_option('sqlalchemy.url', engine_url) # we use '.get' since the particular branch might not have been created @@ -206,8 +209,9 @@ def _db_sync(branch=None, *, engine=None): # design) uses *python* interpolation to write the string out ... where # "%" is the special python interpolation character! Avoid this # mismatch by quoting all %'s for the set below. 
- engine_url = engine.url.render_as_string( - hide_password=False).replace('%', '%%') + engine_url = engine.url.render_as_string(hide_password=False).replace( + '%', '%%' + ) config.set_main_option('sqlalchemy.url', engine_url) _upgrade_alembic(engine, config, branch) diff --git a/keystone/common/tokenless_auth.py b/keystone/common/tokenless_auth.py index 0d651d9750..4fcb8987a8 100644 --- a/keystone/common/tokenless_auth.py +++ b/keystone/common/tokenless_auth.py @@ -68,8 +68,10 @@ class TokenlessAuthHelper(provider_api.ProviderAPIMixin, object): elif project_domain_name: scope['project']['domain'] = {'name': project_domain_name} else: - msg = _('Neither Project Domain ID nor Project Domain Name ' - 'was provided.') + msg = _( + 'Neither Project Domain ID nor Project Domain Name ' + 'was provided.' + ) raise exception.ValidationError(msg) elif domain_id: scope['domain'] = {'id': domain_id} @@ -77,8 +79,8 @@ class TokenlessAuthHelper(provider_api.ProviderAPIMixin, object): scope['domain'] = {'name': domain_name} else: raise exception.ValidationError( - attribute='project or domain', - target='scope') + attribute='project or domain', target='scope' + ) return scope def get_scope(self): @@ -107,11 +109,16 @@ class TokenlessAuthHelper(provider_api.ProviderAPIMixin, object): :rtype: dict """ idp_id = self._build_idp_id() - LOG.debug('The IdP Id %s and protocol Id %s are used to look up ' - 'the mapping.', idp_id, CONF.tokenless_auth.protocol) + LOG.debug( + 'The IdP Id %s and protocol Id %s are used to look up ' + 'the mapping.', + idp_id, + CONF.tokenless_auth.protocol, + ) mapped_properties, mapping_id = self.federation_api.evaluate( - idp_id, CONF.tokenless_auth.protocol, self.env) + idp_id, CONF.tokenless_auth.protocol, self.env + ) user = mapped_properties.get('user', {}) user_id = user.get('id') @@ -129,23 +136,28 @@ class TokenlessAuthHelper(provider_api.ProviderAPIMixin, object): if user_type == utils.UserType.EPHEMERAL: user_ref = {'type': 
utils.UserType.EPHEMERAL} group_ids = mapped_properties['group_ids'] - utils.validate_mapped_group_ids(group_ids, - mapping_id, - self.identity_api) + utils.validate_mapped_group_ids( + group_ids, mapping_id, self.identity_api + ) group_ids.extend( utils.transform_to_group_ids( - mapped_properties['group_names'], mapping_id, - self.identity_api, self.resource_api)) - roles = self.assignment_api.get_roles_for_groups(group_ids, - project_id, - domain_id) + mapped_properties['group_names'], + mapping_id, + self.identity_api, + self.resource_api, + ) + ) + roles = self.assignment_api.get_roles_for_groups( + group_ids, project_id, domain_id + ) if roles is not None: role_names = [role['name'] for role in roles] user_ref['roles'] = role_names user_ref['group_ids'] = list(group_ids) user_ref[federation_constants.IDENTITY_PROVIDER] = idp_id user_ref[federation_constants.PROTOCOL] = ( - CONF.tokenless_auth.protocol) + CONF.tokenless_auth.protocol + ) return user_ref if user_id: @@ -153,20 +165,25 @@ class TokenlessAuthHelper(provider_api.ProviderAPIMixin, object): elif user_name and (user_domain_name or user_domain_id): if user_domain_name: user_domain = self.resource_api.get_domain_by_name( - user_domain_name) - self.resource_api.assert_domain_enabled(user_domain['id'], - user_domain) + user_domain_name + ) + self.resource_api.assert_domain_enabled( + user_domain['id'], user_domain + ) user_domain_id = user_domain['id'] - user_ref = self.identity_api.get_user_by_name(user_name, - user_domain_id) + user_ref = self.identity_api.get_user_by_name( + user_name, user_domain_id + ) else: - msg = _('User auth cannot be built due to missing either ' - 'user id, or user name with domain id, or user name ' - 'with domain name.') + msg = _( + 'User auth cannot be built due to missing either ' + 'user id, or user name with domain id, or user name ' + 'with domain name.' 
+ ) raise exception.ValidationError(msg) self.identity_api.assert_user_enabled( - user_id=user_ref['id'], - user=user_ref) + user_id=user_ref['id'], user=user_ref + ) user_ref['type'] = utils.UserType.LOCAL return user_ref @@ -184,7 +201,8 @@ class TokenlessAuthHelper(provider_api.ProviderAPIMixin, object): idp = self.env.get(CONF.tokenless_auth.issuer_attribute) if idp is None: raise exception.TokenlessAuthConfigError( - issuer_attribute=CONF.tokenless_auth.issuer_attribute) + issuer_attribute=CONF.tokenless_auth.issuer_attribute + ) hashed_idp = hashlib.sha256(idp.encode('utf-8')) return hashed_idp.hexdigest() diff --git a/keystone/common/utils.py b/keystone/common/utils.py index b24e11f8e6..5b2849efa3 100644 --- a/keystone/common/utils.py +++ b/keystone/common/utils.py @@ -43,8 +43,12 @@ from keystone.i18n import _ CONF = keystone.conf.CONF LOG = log.getLogger(__name__) WHITELISTED_PROPERTIES = [ - 'tenant_id', 'project_id', 'user_id', - 'compute_host', 'public_endpoint', ] + 'tenant_id', + 'project_id', + 'user_id', + 'compute_host', + 'public_endpoint', +] # NOTE(stevermar): This UUID must stay the same, forever, across @@ -53,7 +57,9 @@ WHITELISTED_PROPERTIES = [ RESOURCE_ID_NAMESPACE = uuid.UUID('4332ecab-770b-4288-a680-b9aca3b1b153') # Compatibilty for password hashing functions. 
-verify_length_and_trunc_password = password_hashing.verify_length_and_trunc_password # noqa +verify_length_and_trunc_password = ( + password_hashing.verify_length_and_trunc_password +) # noqa hash_password = password_hashing.hash_password hash_user_password = password_hashing.hash_user_password check_password = password_hashing.check_password @@ -75,8 +81,12 @@ def resource_uuid(value): except ValueError: if len(value) <= 64: return uuid.uuid5(RESOURCE_ID_NAMESPACE, value).hex - raise ValueError(_('Length of transformable resource id > 64, ' - 'which is max allowed characters')) + raise ValueError( + _( + 'Length of transformable resource id > 64, ' + 'which is max allowed characters' + ) + ) def flatten_dict(d, parent_key=''): @@ -157,16 +167,19 @@ def setup_remote_pydev_debug(): except ImportError: import pydevd - pydevd.settrace(CONF.pydev_debug_host, - port=CONF.pydev_debug_port, - stdoutToServer=True, - stderrToServer=True) + pydevd.settrace( + CONF.pydev_debug_host, + port=CONF.pydev_debug_port, + stdoutToServer=True, + stderrToServer=True, + ) return True except Exception: LOG.exception( 'Error setting up the debug environment. Verify that the ' 'option --debug-url has the format : and that a ' - 'debugger processes is listening on that port.') + 'debugger processes is listening on that port.' 
+ ) raise @@ -219,10 +232,11 @@ def get_unix_user(user=None): elif user is None: user_info = pwd.getpwuid(os.geteuid()) else: - user_cls_name = reflection.get_class_name(user, - fully_qualified=False) - raise TypeError('user must be string, int or None; not %s (%r)' % - (user_cls_name, user)) + user_cls_name = reflection.get_class_name(user, fully_qualified=False) + raise TypeError( + 'user must be string, int or None; not %s (%r)' + % (user_cls_name, user) + ) return user_info.pw_uid, user_info.pw_name @@ -279,10 +293,13 @@ def get_unix_group(group=None): elif group is None: group_info = grp.getgrgid(os.getegid()) else: - group_cls_name = reflection.get_class_name(group, - fully_qualified=False) - raise TypeError('group must be string, int or None; not %s (%r)' % - (group_cls_name, group)) + group_cls_name = reflection.get_class_name( + group, fully_qualified=False + ) + raise TypeError( + 'group must be string, int or None; not %s (%r)' + % (group_cls_name, group) + ) return group_info.gr_gid, group_info.gr_name @@ -337,12 +354,14 @@ def isotime(at=None, subsecond=False): # NOTE(lbragstad): Datetime objects are immutable, so reassign the date we # are working with to itself as we drop microsecond precision. 
at = at.replace(microsecond=0) - st = at.strftime(_ISO8601_TIME_FORMAT - if not subsecond - else _ISO8601_TIME_FORMAT_SUBSECOND) + st = at.strftime( + _ISO8601_TIME_FORMAT + if not subsecond + else _ISO8601_TIME_FORMAT_SUBSECOND + ) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' # Need to handle either iso8601 or python UTC format - st += ('Z' if tz in ['UTC', 'UTC+00:00'] else tz) + st += 'Z' if tz in ['UTC', 'UTC+00:00'] else tz return st @@ -414,8 +433,8 @@ def format_url(url, substitutions, silent_keyerror_failures=None): """ substitutions = WhiteListedItemFilter( - WHITELISTED_PROPERTIES, - substitutions) + WHITELISTED_PROPERTIES, substitutions + ) allow_keyerror = silent_keyerror_failures or [] try: result = url.replace('$(', '%(') % substitutions @@ -431,13 +450,17 @@ def format_url(url, substitutions, silent_keyerror_failures=None): else: result = None except TypeError as e: - msg = ("Malformed endpoint '%(url)s'. The following type error " - "occurred during string substitution: %(typeerror)s") + msg = ( + "Malformed endpoint '%(url)s'. 
The following type error " + "occurred during string substitution: %(typeerror)s" + ) LOG.error(msg, {"url": url, "typeerror": e}) raise exception.MalformedEndpoint(endpoint=url) except ValueError: - msg = ("Malformed endpoint %s - incomplete format " - "(are you missing a type notifier ?)") + msg = ( + "Malformed endpoint %s - incomplete format " + "(are you missing a type notifier ?)" + ) LOG.error(msg, url) raise exception.MalformedEndpoint(endpoint=url) return result @@ -541,19 +564,19 @@ def create_directory(directory, keystone_user_id=None, keystone_group_id=None): LOG.error( 'Failed to create %s: either it already ' 'exists or you don\'t have sufficient permissions to ' - 'create it', directory + 'create it', + directory, ) if keystone_user_id and keystone_group_id: - os.chown( - directory, - keystone_user_id, - keystone_group_id) + os.chown(directory, keystone_user_id, keystone_group_id) elif keystone_user_id or keystone_group_id: LOG.warning( 'Unable to change the ownership of key repository without ' 'a keystone user ID and keystone group ID both being ' - 'provided: %s', directory) + 'provided: %s', + directory, + ) @contextlib.contextmanager diff --git a/keystone/common/validation/parameter_types.py b/keystone/common/validation/parameter_types.py index 2ceefea623..9a03665fad 100644 --- a/keystone/common/validation/parameter_types.py +++ b/keystone/common/validation/parameter_types.py @@ -11,10 +11,7 @@ # under the License. 
"""Common parameter types for validating a request reference.""" -boolean = { - 'type': 'boolean', - 'enum': [True, False] -} +boolean = {'type': 'boolean', 'enum': [True, False]} # NOTE(lbragstad): Be mindful of this pattern as it might require changes # once this is used on user names, LDAP-based user names specifically since @@ -26,14 +23,10 @@ name = { 'type': 'string', 'minLength': 1, 'maxLength': 255, - 'pattern': r'[\S]+' + 'pattern': r'[\S]+', } -external_id_string = { - 'type': 'string', - 'minLength': 1, - 'maxLength': 64 -} +external_id_string = {'type': 'string', 'minLength': 1, 'maxLength': 64} id_string = { 'type': 'string', @@ -41,19 +34,17 @@ id_string = { 'maxLength': 64, # TODO(lbragstad): Find a way to make this configurable such that the end # user chooses how much control they want over id_strings with a regex - 'pattern': r'^[a-zA-Z0-9-]+$' + 'pattern': r'^[a-zA-Z0-9-]+$', } mapping_id_string = { 'type': 'string', 'minLength': 1, 'maxLength': 64, - 'pattern': '^[a-zA-Z0-9-_]+$' + 'pattern': '^[a-zA-Z0-9-_]+$', } -description = { - 'type': 'string' -} +description = {'type': 'string'} url = { 'type': 'string', @@ -62,15 +53,9 @@ url = { # NOTE(edmondsw): we could do more to validate per various RFCs, but # decision was made to err on the side of leniency. 
The following is based # on rfc1738 section 2.1 - 'pattern': '^[a-zA-Z0-9+.-]+:.+' + 'pattern': '^[a-zA-Z0-9+.-]+:.+', } -email = { - 'type': 'string', - 'format': 'email' -} +email = {'type': 'string', 'format': 'email'} -integer_min0 = { - 'type': 'integer', - 'minimum': 0 -} +integer_min0 = {'type': 'integer', 'minimum': 0} diff --git a/keystone/common/validation/validators.py b/keystone/common/validation/validators.py index 94fbbbd1d3..63428f3000 100644 --- a/keystone/common/validation/validators.py +++ b/keystone/common/validation/validators.py @@ -35,15 +35,20 @@ def validate_password(password): try: if not re.match(pattern, password): pattern_desc = ( - CONF.security_compliance.password_regex_description) + CONF.security_compliance.password_regex_description + ) raise exception.PasswordRequirementsValidationError( - detail=pattern_desc) + detail=pattern_desc + ) except re.error: - msg = ("Unable to validate password due to invalid regular " - "expression - password_regex: %s") + msg = ( + "Unable to validate password due to invalid regular " + "expression - password_regex: %s" + ) LOG.error(msg, pattern) - detail = _("Unable to validate password due to invalid " - "configuration") + detail = _( + "Unable to validate password due to invalid " "configuration" + ) raise exception.PasswordValidationError(detail=detail) @@ -61,8 +66,9 @@ class SchemaValidator(object): # didn't by default. See the Nova V3 validator for details on how this # is done. validators = {} - validator_cls = jsonschema.validators.extend(self.validator_org, - validators) + validator_cls = jsonschema.validators.extend( + self.validator_org, validators + ) fc = jsonschema.FormatChecker() self.validator = validator_cls(schema, format_checker=fc) @@ -81,9 +87,9 @@ class SchemaValidator(object): # we don't expose sensitive user information in the event it # fails validation. 
path = '/'.join(map(str, ex.path)) - detail = _("Invalid input for field '%(path)s': " - "%(message)s") % {'path': path, - 'message': str(ex)} + detail = _( + "Invalid input for field '%(path)s': " "%(message)s" + ) % {'path': path, 'message': str(ex)} else: detail = str(ex) raise exception.SchemaValidationError(detail=detail) diff --git a/keystone/conf/__init__.py b/keystone/conf/__init__.py index d980d248b3..53ece0e0cb 100644 --- a/keystone/conf/__init__.py +++ b/keystone/conf/__init__.py @@ -88,7 +88,7 @@ conf_modules = [ totp, trust, unified_limit, - wsgi + wsgi, ] @@ -111,8 +111,10 @@ def set_default_for_default_log_levels(): ] log.register_options(CONF) - log.set_defaults(default_log_levels=log.get_default_log_levels() + - extra_log_level_defaults) + log.set_defaults( + default_log_levels=log.get_default_log_levels() + + extra_log_level_defaults + ) def setup_logging(): @@ -138,25 +140,25 @@ def configure(conf=None): def set_external_opts_defaults(): """Update default configuration options for oslo.middleware.""" cors.set_defaults( - allow_headers=['X-Auth-Token', - 'X-Openstack-Request-Id', - 'X-Subject-Token', - 'X-Project-Id', - 'X-Project-Name', - 'X-Project-Domain-Id', - 'X-Project-Domain-Name', - 'X-Domain-Id', - 'X-Domain-Name', - 'Openstack-Auth-Receipt'], - expose_headers=['X-Auth-Token', - 'X-Openstack-Request-Id', - 'X-Subject-Token', - 'Openstack-Auth-Receipt'], - allow_methods=['GET', - 'PUT', - 'POST', - 'DELETE', - 'PATCH'] + allow_headers=[ + 'X-Auth-Token', + 'X-Openstack-Request-Id', + 'X-Subject-Token', + 'X-Project-Id', + 'X-Project-Name', + 'X-Project-Domain-Id', + 'X-Project-Domain-Name', + 'X-Domain-Id', + 'X-Domain-Name', + 'Openstack-Auth-Receipt', + ], + expose_headers=[ + 'X-Auth-Token', + 'X-Openstack-Request-Id', + 'X-Subject-Token', + 'Openstack-Auth-Receipt', + ], + allow_methods=['GET', 'PUT', 'POST', 'DELETE', 'PATCH'], ) # configure OSprofiler options diff --git a/keystone/conf/application_credential.py 
b/keystone/conf/application_credential.py index 9881e7b073..a86caf5af4 100644 --- a/keystone/conf/application_credential.py +++ b/keystone/conf/application_credential.py @@ -18,37 +18,49 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the application credential backend driver in the `keystone.application_credential` namespace. Keystone only provides a `sql` driver, so there is no reason to change this unless you are providing a custom entry point. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for application credential caching. This has no effect unless global caching is enabled. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', - help=utils.fmt(""" + help=utils.fmt( + """ Time to cache application credential data in seconds. This has no effect unless global caching is enabled. -""")) +""" + ), +) user_limit = cfg.IntOpt( 'user_limit', default=-1, - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of application credentials a user is permitted to create. A value of -1 means unlimited. If a limit is not set, users are permitted to create application credentials at will, which could lead to bloat in the keystone database or open keystone to a DoS attack. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/assignment.py b/keystone/conf/assignment.py index 72c2e8c20c..d981b840b6 100644 --- a/keystone/conf/assignment.py +++ b/keystone/conf/assignment.py @@ -18,26 +18,29 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the assignment backend driver (where role assignments are stored) in the `keystone.assignment` namespace. Only a SQL driver is supplied by keystone itself. Unless you are writing proprietary drivers for keystone, you do not need to set this option. 
-""")) +""" + ), +) prohibited_implied_role = cfg.ListOpt( 'prohibited_implied_role', default=['admin'], - help=utils.fmt(""" + help=utils.fmt( + """ A list of role names which are prohibited from being an implied role. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] -ALL_OPTS = [ - driver, - prohibited_implied_role -] +ALL_OPTS = [driver, prohibited_implied_role] def register_opts(conf): diff --git a/keystone/conf/auth.py b/keystone/conf/auth.py index d53fc15e39..1515c92ecc 100644 --- a/keystone/conf/auth.py +++ b/keystone/conf/auth.py @@ -19,34 +19,44 @@ from keystone.conf import utils methods = cfg.ListOpt( 'methods', default=constants._DEFAULT_AUTH_METHODS, - help=utils.fmt(""" + help=utils.fmt( + """ Allowed authentication methods. Note: You should disable the `external` auth method if you are currently using federation. External auth and federation both use the REMOTE_USER variable. Since both the mapped and external plugin are being invoked to validate attributes in the request environment, it can cause conflicts. -""")) +""" + ), +) password = cfg.StrOpt( # nosec : This is the name of the plugin, not - 'password', # a password that needs to be protected. - help=utils.fmt(""" + 'password', # a password that needs to be protected. + help=utils.fmt( + """ Entry point for the password auth plugin module in the `keystone.auth.password` namespace. You do not need to set this unless you are overriding keystone's own password authentication plugin. -""")) +""" + ), +) token = cfg.StrOpt( 'token', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the token auth plugin module in the `keystone.auth.token` namespace. You do not need to set this unless you are overriding keystone's own token authentication plugin. 
-""")) +""" + ), +) # deals with REMOTE_USER authentication external = cfg.StrOpt( 'external', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the external (`REMOTE_USER`) auth plugin module in the `keystone.auth.external` namespace. Supplied drivers are `DefaultDomain` and `Domain`. The default driver is `DefaultDomain`, which assumes that all users @@ -58,32 +68,43 @@ then the default domain will be used instead). You do not need to set this unless you are taking advantage of "external authentication", where the application server (such as Apache) is handling authentication instead of keystone. -""")) +""" + ), +) oauth1 = cfg.StrOpt( 'oauth1', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the OAuth 1.0a auth plugin module in the `keystone.auth.oauth1` namespace. You do not need to set this unless you are overriding keystone's own `oauth1` authentication plugin. -""")) +""" + ), +) mapped = cfg.StrOpt( 'mapped', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the mapped auth plugin module in the `keystone.auth.mapped` namespace. You do not need to set this unless you are overriding keystone's own `mapped` authentication plugin. -""")) +""" + ), +) application_credential = cfg.StrOpt( 'application_credential', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the application_credential auth plugin module in the `keystone.auth.application_credential` namespace. You do not need to set this unless you are overriding keystone's own `application_credential` authentication plugin. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/catalog.py b/keystone/conf/catalog.py index 4299c42166..6dadf52277 100644 --- a/keystone/conf/catalog.py +++ b/keystone/conf/catalog.py @@ -18,46 +18,61 @@ from keystone.conf import utils template_file = cfg.StrOpt( 'template_file', default='default_catalog.templates', - help=utils.fmt(""" + help=utils.fmt( + """ Absolute path to the file used for the templated catalog backend. This option is only used if the `[catalog] driver` is set to `templated`. -""")) +""" + ), +) driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the catalog driver in the `keystone.catalog` namespace. Keystone provides a `sql` option (which supports basic CRUD operations through SQL), a `templated` option (which loads the catalog from a templated catalog file on disk), and a `endpoint_filter.sql` option (which supports arbitrary service catalogs per project). -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for catalog caching. This has no effect unless global caching is enabled. In a typical deployment, there is no reason to disable this. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', - help=utils.fmt(""" + help=utils.fmt( + """ Time to cache catalog data (in seconds). This has no effect unless global and catalog caching are both enabled. Catalog data (services, endpoints, etc.) typically does not change frequently, and so a longer duration than the global default may be desirable. -""")) +""" + ), +) list_limit = cfg.IntOpt( 'list_limit', - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of entities that will be returned in a catalog collection. There is typically no reason to set this, as it would be unusual for a deployment to have enough services or endpoints to exceed a reasonable limit. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/constants.py b/keystone/conf/constants.py index 893352d593..7b9923e37f 100644 --- a/keystone/conf/constants.py +++ b/keystone/conf/constants.py @@ -17,8 +17,14 @@ package. """ -_DEFAULT_AUTH_METHODS = ['external', 'password', 'token', 'oauth1', 'mapped', - 'application_credential'] +_DEFAULT_AUTH_METHODS = [ + 'external', + 'password', + 'token', + 'oauth1', + 'mapped', + 'application_credential', +] _CERTFILE = '/etc/keystone/ssl/certs/signing_cert.pem' _KEYFILE = '/etc/keystone/ssl/private/signing_key.pem' diff --git a/keystone/conf/credential.py b/keystone/conf/credential.py index 9dcb870c71..1532b1882c 100644 --- a/keystone/conf/credential.py +++ b/keystone/conf/credential.py @@ -18,66 +18,87 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the credential backend driver in the `keystone.credential` namespace. Keystone only provides a `sql` driver, so there's no reason to change this unless you are providing a custom entry point. -""")) +""" + ), +) provider = cfg.StrOpt( 'provider', default='fernet', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for credential encryption and decryption operations in the `keystone.credential.provider` namespace. Keystone only provides a `fernet` driver, so there's no reason to change this unless you are providing a custom entry point to encrypt and decrypt credentials. -""")) +""" + ), +) key_repository = cfg.StrOpt( 'key_repository', default='/etc/keystone/credential-keys/', - help=utils.fmt(""" + help=utils.fmt( + """ Directory containing Fernet keys used to encrypt and decrypt credentials stored in the credential backend. Fernet keys used to encrypt credentials have no relationship to Fernet keys used to encrypt Fernet tokens. Both sets of keys should be managed separately and require different rotation policies. 
Do not share this repository with the repository used to manage keys for Fernet tokens. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for caching only on retrieval of user credentials. This has no effect unless global caching is enabled. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', - help=utils.fmt(""" + help=utils.fmt( + """ Time to cache credential data in seconds. This has no effect unless global caching is enabled. -""")) +""" + ), +) auth_ttl = cfg.IntOpt( 'auth_ttl', default=15, - help=utils.fmt(""" + help=utils.fmt( + """ The length of time in minutes for which a signed EC2 or S3 token request is valid from the timestamp contained in the token request. -""")) +""" + ), +) user_limit = cfg.IntOpt( 'user_limit', default=-1, - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of credentials a user is permitted to create. A value of -1 means unlimited. If a limit is not set, users are permitted to create credentials at will, which could lead to bloat in the keystone database or open keystone to a DoS attack. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] ALL_OPTS = [ diff --git a/keystone/conf/default.py b/keystone/conf/default.py index 529a469865..bd155b67c3 100644 --- a/keystone/conf/default.py +++ b/keystone/conf/default.py @@ -18,18 +18,22 @@ from keystone.conf import utils admin_token = cfg.StrOpt( 'admin_token', secret=True, - help=utils.fmt(""" + help=utils.fmt( + """ Using this feature is *NOT* recommended. Instead, use the `keystone-manage bootstrap` command. The value of this option is treated as a "shared secret" that can be used to bootstrap Keystone through the API. This "token" does not represent a user (it has no identity), and carries no explicit authorization (it effectively bypasses most authorization checks). If set to `None`, the value is ignored and the `admin_token` middleware is effectively disabled. 
-""")) +""" + ), +) public_endpoint = cfg.URIOpt( 'public_endpoint', - help=utils.fmt(""" + help=utils.fmt( + """ The base public endpoint URL for Keystone that is advertised to clients (NOTE: this does NOT affect how Keystone listens for connections). Defaults to the base host URL of the request. For example, if keystone receives a request to @@ -37,23 +41,31 @@ base host URL of the request. For example, if keystone receives a request to treated as `http://server:5000`. You should only need to set option if either the value of the base URL contains a path that keystone does not automatically infer (`/prefix/v3`), or if the endpoint should be found on a different host. -""")) +""" + ), +) max_project_tree_depth = cfg.IntOpt( 'max_project_tree_depth', default=5, - help=utils.fmt(""" + help=utils.fmt( + """ Maximum depth of the project hierarchy, excluding the project acting as a domain at the top of the hierarchy. WARNING: Setting it to a large value may adversely impact performance. -""")) +""" + ), +) max_param_size = cfg.IntOpt( 'max_param_size', default=64, - help=utils.fmt(""" + help=utils.fmt( + """ Limit the sizes of user & project ID/names. -""")) +""" + ), +) # NOTE(breton): 255 is the size of the database columns used for ID fields. # This size is picked so that the tokens can be indexed in-place as opposed to @@ -61,67 +73,85 @@ Limit the sizes of user & project ID/names. max_token_size = cfg.IntOpt( 'max_token_size', default=255, - help=utils.fmt(""" + help=utils.fmt( + """ Similar to `[DEFAULT] max_param_size`, but provides an exception for token values. With Fernet tokens, this can be set as low as 255. -""")) +""" + ), +) list_limit = cfg.IntOpt( 'list_limit', - help=utils.fmt(""" + help=utils.fmt( + """ The maximum number of entities that will be returned in a collection. This global limit may be then overridden for a specific driver, by specifying a list_limit in the appropriate section (for example, `[assignment]`). No limit is set by default. 
In larger deployments, it is recommended that you set this to a reasonable number to prevent operations like listing all users and projects from placing an unnecessary load on the system. -""")) +""" + ), +) strict_password_check = cfg.BoolOpt( 'strict_password_check', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ If set to true, strict password length checking is performed for password manipulation. If a password exceeds the maximum length, the operation will fail with an HTTP 403 Forbidden error. If set to false, passwords are automatically truncated to the maximum length. -""")) +""" + ), +) insecure_debug = cfg.BoolOpt( 'insecure_debug', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ If set to true, then the server will return information in HTTP responses that may allow an unauthenticated or authenticated user to get more information than normal, such as additional details about why authentication failed. This may be useful for debugging but is insecure. -""")) +""" + ), +) default_publisher_id = cfg.StrOpt( 'default_publisher_id', - help=utils.fmt(""" + help=utils.fmt( + """ Default `publisher_id` for outgoing notifications. If left undefined, Keystone will default to using the server's host name. -""")) +""" + ), +) notification_format = cfg.StrOpt( 'notification_format', default='cadf', choices=['basic', 'cadf'], - help=utils.fmt(""" + help=utils.fmt( + """ Define the notification format for identity service events. A `basic` notification only has information about the resource being operated on. A `cadf` notification has the same information, as well as information about the initiator of the event. The `cadf` option is entirely backwards compatible with the `basic` option, but is fully CADF-compliant, and is recommended for auditing use cases. 
-""")) +""" + ), +) notification_opt_out = cfg.MultiStrOpt( 'notification_opt_out', - default=["identity.authenticate.success", - "identity.authenticate.pending"], - help=utils.fmt(""" + default=["identity.authenticate.success", "identity.authenticate.pending"], + help=utils.fmt( + """ You can reduce the number of notifications keystone emits by explicitly opting out. Keystone will not emit notifications that match the patterns expressed in this list. Values are expected to be in the form of @@ -132,7 +162,9 @@ example, the following suppresses notifications describing user creation or successful authentication events: notification_opt_out=identity.user.create notification_opt_out=identity.authenticate.success -""")) +""" + ), +) GROUP_NAME = 'DEFAULT' diff --git a/keystone/conf/domain_config.py b/keystone/conf/domain_config.py index 93814851f3..b64cb6d38e 100644 --- a/keystone/conf/domain_config.py +++ b/keystone/conf/domain_config.py @@ -18,46 +18,59 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the domain-specific configuration driver in the `keystone.resource.domain_config` namespace. Only a `sql` option is provided by keystone, so there is no reason to set this unless you are providing a custom entry point. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for caching of the domain-specific configuration backend. This has no effect unless global caching is enabled. There is normally no reason to disable this. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', default=300, - help=utils.fmt(""" + help=utils.fmt( + """ Time-to-live (TTL, in seconds) to cache domain-specific configuration data. This has no effect unless `[domain_config] caching` is enabled. 
-""")) +""" + ), +) additional_whitelisted_options = cfg.Opt( 'additional_whitelisted_options', type=cfg.types.Dict(value_type=cfg.types.List(bounds=True)), - help=utils.fmt(""" + help=utils.fmt( + """ Additional whitelisted domain-specific options for out-of-tree drivers. This is a dictonary of lists with the key being the group name and value a list -of group options.""") +of group options.""" + ), ) additional_sensitive_options = cfg.Opt( 'additional_sensitive_options', type=cfg.types.Dict(value_type=cfg.types.List(bounds=True)), - help=utils.fmt(""" + help=utils.fmt( + """ Additional sensitive domain-specific options for out-of-tree drivers. This is a dictonary of lists with the key being the group name and value a list -of group options.""") +of group options.""" + ), ) @@ -67,7 +80,7 @@ ALL_OPTS = [ caching, cache_time, additional_whitelisted_options, - additional_sensitive_options + additional_sensitive_options, ] diff --git a/keystone/conf/endpoint_filter.py b/keystone/conf/endpoint_filter.py index 0a0fdf1fe7..6c07affe24 100644 --- a/keystone/conf/endpoint_filter.py +++ b/keystone/conf/endpoint_filter.py @@ -18,22 +18,28 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the endpoint filter driver in the `keystone.endpoint_filter` namespace. Only a `sql` option is provided by keystone, so there is no reason to set this unless you are providing a custom entry point. -""")) +""" + ), +) return_all_endpoints_if_no_filter = cfg.BoolOpt( 'return_all_endpoints_if_no_filter', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ This controls keystone's behavior if the configured endpoint filters do not result in any endpoints for a user + project pair (and therefore a potentially empty service catalog). If set to true, keystone will return the entire service catalog. If set to false, keystone will return an empty service catalog. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/endpoint_policy.py b/keystone/conf/endpoint_policy.py index efbb3b4e56..43bc8e2f61 100644 --- a/keystone/conf/endpoint_policy.py +++ b/keystone/conf/endpoint_policy.py @@ -18,11 +18,14 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the endpoint policy driver in the `keystone.endpoint_policy` namespace. Only a `sql` driver is provided by keystone, so there is no reason to set this unless you are providing a custom entry point. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/federation.py b/keystone/conf/federation.py index 263543ed97..e5025bd81a 100644 --- a/keystone/conf/federation.py +++ b/keystone/conf/federation.py @@ -16,38 +16,49 @@ from oslo_log import versionutils from keystone.conf import utils -_DEPRECATED_MSG = utils.fmt(""" +_DEPRECATED_MSG = utils.fmt( + """ This option has been superseded by ephemeral users existing in the domain of their identity provider. -""") +""" +) driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the federation backend driver in the `keystone.federation` namespace. Keystone only provides a `sql` driver, so there is no reason to set this option unless you are providing a custom entry point. -""")) +""" + ), +) assertion_prefix = cfg.StrOpt( 'assertion_prefix', default='', - help=utils.fmt(""" + help=utils.fmt( + """ Prefix to use when filtering environment variable names for federated assertions. Matched variables are passed into the federated mapping engine. -""")) +""" + ), +) remote_id_attribute = cfg.StrOpt( 'remote_id_attribute', - help=utils.fmt(""" + help=utils.fmt( + """ Default value for all protocols to be used to obtain the entity ID of the Identity Provider from the environment. For `mod_shib`, this would be `Shib-Identity-Provider`. 
For `mod_auth_openidc`, this could be `HTTP_OIDC_ISS`. For `mod_auth_mellon`, this could be `MELLON_IDP`. This can be overridden on a per-protocol basis by providing a `remote_id_attribute` to the federation protocol using the API. -""")) +""" + ), +) federated_domain_name = cfg.StrOpt( 'federated_domain_name', @@ -55,64 +66,82 @@ federated_domain_name = cfg.StrOpt( deprecated_for_removal=True, deprecated_reason=_DEPRECATED_MSG, deprecated_since=versionutils.deprecated.TRAIN, - help=utils.fmt(""" + help=utils.fmt( + """ An arbitrary domain name that is reserved to allow federated ephemeral users to have a domain concept. Note that an admin will not be able to create a domain with this name or update an existing domain to this name. You are not advised to change this value unless you really have to. -""")) +""" + ), +) trusted_dashboard = cfg.MultiStrOpt( 'trusted_dashboard', default=[], - help=utils.fmt(""" + help=utils.fmt( + """ A list of trusted dashboard hosts. Before accepting a Single Sign-On request to return a token, the origin host must be a member of this list. This configuration option may be repeated for multiple values. You must set this in order to use web-based SSO flows. For example: trusted_dashboard=https://acme.example.com/auth/websso trusted_dashboard=https://beta.example.com/auth/websso -""")) +""" + ), +) sso_callback_template = cfg.StrOpt( 'sso_callback_template', default='/etc/keystone/sso_callback_template.html', - help=utils.fmt(""" + help=utils.fmt( + """ Absolute path to an HTML file used as a Single Sign-On callback handler. This page is expected to redirect the user from keystone back to a trusted dashboard host, by form encoding a token in a POST request. Keystone's default value should be sufficient for most deployments. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for federation caching. This has no effect unless global caching is enabled. 
There is typically no reason to disable this. -""")) +""" + ), +) default_authorization_ttl = cfg.IntOpt( 'default_authorization_ttl', default=0, - help=utils.fmt(""" + help=utils.fmt( + """ Default time in minutes for the validity of group memberships carried over from a mapping. Default is 0, which means disabled. -""")) +""" + ), +) attribute_mapping_default_schema_version = cfg.StrOpt( 'attribute_mapping_default_schema_version', default='1.0', - help=utils.fmt(""" + help=utils.fmt( + """ The attribute mapping default schema version to be used, if the attribute mapping being registered does not have a schema version. One must bear in mind that changing this value will have no effect on attribute mappings that were previously registered when another default value was applied. Once registered, one needs to update the attribute mapping schema via the update API to be able to change an attribute mapping schema version. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/fernet_receipts.py b/keystone/conf/fernet_receipts.py index 4ba077a43e..241dcac389 100644 --- a/keystone/conf/fernet_receipts.py +++ b/keystone/conf/fernet_receipts.py @@ -20,7 +20,8 @@ from keystone.conf import utils key_repository = cfg.StrOpt( 'key_repository', default='/etc/keystone/fernet-keys/', - help=utils.fmt(""" + help=utils.fmt( + """ Directory containing Fernet receipt keys. This directory must exist before using `keystone-manage fernet_setup` for the first time, must be writable by the user running `keystone-manage fernet_setup` or `keystone-manage @@ -41,19 +42,24 @@ unpack it on host B to a temporary location, and atomically move (`mv`) the directory into place on host B). Running `keystone-manage fernet_rotate` *twice* on a key repository without syncing other nodes will result in receipts that can not be validated by all nodes. 
-""")) +""" + ), +) max_active_keys = cfg.IntOpt( 'max_active_keys', default=3, min=1, - help=utils.fmt(""" + help=utils.fmt( + """ This controls how many keys are held in rotation by `keystone-manage fernet_rotate` before they are discarded. The default value of 3 means that keystone will maintain one staged key (always index 0), one primary key (the highest numerical index), and one secondary key (every other index). Increasing this value means that additional secondary keys will be kept in the rotation. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/fernet_tokens.py b/keystone/conf/fernet_tokens.py index 728838c0fb..e3ccc41aee 100644 --- a/keystone/conf/fernet_tokens.py +++ b/keystone/conf/fernet_tokens.py @@ -18,7 +18,8 @@ from keystone.conf import utils key_repository = cfg.StrOpt( 'key_repository', default='/etc/keystone/fernet-keys/', - help=utils.fmt(""" + help=utils.fmt( + """ Directory containing Fernet token keys. This directory must exist before using `keystone-manage fernet_setup` for the first time, must be writable by the user running `keystone-manage fernet_setup` or `keystone-manage fernet_rotate`, and @@ -39,19 +40,24 @@ temporary location, and atomically move (`mv`) the directory into place on host B). Running `keystone-manage fernet_rotate` *twice* on a key repository without syncing other nodes will result in tokens that can not be validated by all nodes. -""")) +""" + ), +) max_active_keys = cfg.IntOpt( 'max_active_keys', default=3, min=1, - help=utils.fmt(""" + help=utils.fmt( + """ This controls how many keys are held in rotation by `keystone-manage fernet_rotate` before they are discarded. The default value of 3 means that keystone will maintain one staged key (always index 0), one primary key (the highest numerical index), and one secondary key (every other index). Increasing this value means that additional secondary keys will be kept in the rotation. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/identity.py b/keystone/conf/identity.py index e50468e0cf..4fb7fc7572 100644 --- a/keystone/conf/identity.py +++ b/keystone/conf/identity.py @@ -19,7 +19,8 @@ from keystone.conf import utils default_domain_id = cfg.StrOpt( 'default_domain_id', default='default', - help=utils.fmt(""" + help=utils.fmt( + """ This references the domain to use for all Identity API v2 requests (which are not aware of domains). A domain with this ID can optionally be created for you by `keystone-manage bootstrap`. The domain referenced by this ID cannot be @@ -27,12 +28,15 @@ deleted on the v3 API, to prevent accidentally breaking the v2 API. There is nothing special about this domain, other than the fact that it must exist to order to maintain support for your v2 clients. There is typically no reason to change this value. -""")) +""" + ), +) domain_specific_drivers_enabled = cfg.BoolOpt( 'domain_specific_drivers_enabled', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ A subset (or all) of domains can have their own identity driver, each with their own partial configuration options, stored in either the resource backend or in a file in a domain configuration directory (depending on the setting of @@ -40,33 +44,42 @@ or in a file in a domain configuration directory (depending on the setting of domain need to be specified in this manner. This feature is disabled by default, but may be enabled by default in a future release; set to true to enable. -""")) +""" + ), +) domain_configurations_from_database = cfg.BoolOpt( 'domain_configurations_from_database', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ By default, domain-specific configuration data is read from files in the directory identified by `[identity] domain_config_dir`. 
Enabling this configuration option allows you to instead manage domain-specific configurations through the API, which are then persisted in the backend (typically, a SQL database), rather than using configuration files on disk. -""")) +""" + ), +) domain_config_dir = cfg.StrOpt( 'domain_config_dir', default='/etc/keystone/domains', - help=utils.fmt(""" + help=utils.fmt( + """ Absolute path where keystone should locate domain-specific `[identity]` configuration files. This option has no effect unless `[identity] domain_specific_drivers_enabled` is set to true. There is typically no reason to change this value. -""")) +""" + ), +) driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the identity backend driver in the `keystone.identity` namespace. Keystone provides a `sql` and `ldap` driver. This option is also used as the default driver selection (along with the other configuration @@ -75,54 +88,72 @@ domain_specific_drivers_enabled` is enabled, but no applicable domain-specific configuration is defined for the domain in question. Unless your deployment primarily relies on `ldap` AND is not using domain-specific configuration, you should typically leave this set to `sql`. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for identity caching. This has no effect unless global caching is enabled. There is typically no reason to disable this. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', default=600, - help=utils.fmt(""" + help=utils.fmt( + """ Time to cache identity data (in seconds). This has no effect unless global and identity caching are enabled. -""")) +""" + ), +) max_password_length = cfg.IntOpt( 'max_password_length', default=4096, max=passlib.utils.MAX_PASSWORD_SIZE, - help=utils.fmt(""" + help=utils.fmt( + """ Maximum allowed length for user passwords. Decrease this value to improve performance. 
Changing this value does not effect existing passwords. This value can also be overridden by certain hashing algorithms maximum allowed length which takes precedence over the configured value. The bcrypt max_password_length is 72 bytes. -""")) +""" + ), +) list_limit = cfg.IntOpt( 'list_limit', - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of entities that will be returned in an identity collection. -""")) +""" + ), +) password_hash_algorithm = cfg.StrOpt( 'password_hash_algorithm', choices=['bcrypt', 'bcrypt_sha256', 'scrypt', 'pbkdf2_sha512'], default='bcrypt', - help=utils.fmt(""" + help=utils.fmt( + """ The password hashing algorithm to use for passwords stored within keystone. -""")) +""" + ), +) password_hash_rounds = cfg.IntOpt( 'password_hash_rounds', - help=utils.fmt(""" + help=utils.fmt( + """ This option represents a trade off between security and performance. Higher values lead to slower performance, but higher security. Changing this option will only affect newly created passwords as existing password hashes already @@ -137,13 +168,16 @@ The default for pbkdf_sha512 is 60000, must be within `range(1,1<<32)` WARNING: If using scrypt, increasing this value increases BOTH time AND memory requirements to hash a password. -""")) +""" + ), +) salt_bytesize = cfg.IntOpt( 'salt_bytesize', min=0, max=96, - help=utils.fmt(""" + help=utils.fmt( + """ Number of bytes to use in scrypt and pbkfd2_sha512 hashing salt. Default for scrypt is 16 bytes. @@ -151,24 +185,32 @@ Default for pbkfd2_sha512 is 16 bytes. Limited to a maximum of 96 bytes due to the size of the column used to store password hashes. -""")) +""" + ), +) scrypt_block_size = cfg.IntOpt( 'scrypt_block_size', - help=utils.fmt(""" + help=utils.fmt( + """ Optional block size to pass to scrypt hash function (the `r` parameter). Useful for tuning scrypt to optimal performance for your CPU architecture. This option is only used when the `password_hash_algorithm` option is set to `scrypt`. 
Defaults to 8. -""")) +""" + ), +) scrypt_paralellism = cfg.IntOpt( 'scrypt_parallelism', - help=utils.fmt(""" + help=utils.fmt( + """ Optional parallelism to pass to scrypt hash function (the `p` parameter). This option is only used when the `password_hash_algorithm` option is set to `scrypt`. Defaults to 1. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] ALL_OPTS = [ diff --git a/keystone/conf/identity_mapping.py b/keystone/conf/identity_mapping.py index e7159613c1..3bae75a346 100644 --- a/keystone/conf/identity_mapping.py +++ b/keystone/conf/identity_mapping.py @@ -18,28 +18,35 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the identity mapping backend driver in the `keystone.identity.id_mapping` namespace. Keystone only provides a `sql` driver, so there is no reason to change this unless you are providing a custom entry point. -""")) +""" + ), +) generator = cfg.StrOpt( 'generator', default='sha256', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the public ID generator for user and group entities in the `keystone.identity.id_generator` namespace. The Keystone identity mapper only supports generators that produce 64 bytes or less. Keystone only provides a `sha256` entry point, so there is no reason to change this value unless you're providing a custom entry point. -""")) +""" + ), +) backward_compatible_ids = cfg.BoolOpt( 'backward_compatible_ids', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ The format of user and group IDs changed in Juno for backends that do not generate UUIDs (for example, LDAP), with keystone providing a hash mapping to the underlying attribute in LDAP. By default this mapping is disabled, which @@ -55,7 +62,9 @@ different IDs to clients than it did previously (existing IDs in the API will suddenly change). 
Typically this means that the only time you can set this value to false is when configuring a fresh installation, although that is the recommended value. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/jwt_tokens.py b/keystone/conf/jwt_tokens.py index 3b237e4aca..10e84901ae 100644 --- a/keystone/conf/jwt_tokens.py +++ b/keystone/conf/jwt_tokens.py @@ -18,18 +18,22 @@ from keystone.conf import utils jws_public_key_repository = cfg.StrOpt( 'jws_public_key_repository', default='/etc/keystone/jws-keys/public', - help=utils.fmt(""" + help=utils.fmt( + """ Directory containing public keys for validating JWS token signatures. This directory must exist in order for keystone's server process to start. It must also be readable by keystone's server process. It must contain at least one public key that corresponds to a private key in `keystone.conf [jwt_tokens] jws_private_key_repository`. This option is only applicable in deployments issuing JWS tokens and setting `keystone.conf [token] provider = jws`. -""")) +""" + ), +) jws_private_key_repository = cfg.StrOpt( 'jws_private_key_repository', default='/etc/keystone/jws-keys/private', - help=utils.fmt(""" + help=utils.fmt( + """ Directory containing private keys for signing JWS tokens. This directory must exist in order for keystone's server process to start. It must also be readable by keystone's server process. It must contain at least one private key that @@ -40,14 +44,13 @@ the future, keystone may support the ability to sign tokens with multiple private keys. For now, only a key named `private.pem` within this directory is required to issue JWS tokens. This option is only applicable in deployments issuing JWS tokens and setting `keystone.conf [token] provider = jws`. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] -ALL_OPTS = [ - jws_public_key_repository, - jws_private_key_repository -] +ALL_OPTS = [jws_public_key_repository, jws_private_key_repository] def register_opts(conf): diff --git a/keystone/conf/ldap.py b/keystone/conf/ldap.py index f4206ae835..43d31fffad 100644 --- a/keystone/conf/ldap.py +++ b/keystone/conf/ldap.py @@ -18,474 +18,633 @@ from keystone.conf import utils url = cfg.StrOpt( 'url', default='ldap://localhost', - help=utils.fmt(""" + help=utils.fmt( + """ URL(s) for connecting to the LDAP server. Multiple LDAP URLs may be specified as a comma separated string. The first URL to successfully bind is used for the connection. -""")) +""" + ), +) randomize_urls = cfg.BoolOpt( 'randomize_urls', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ Randomize the order of URLs in each keystone process. This makes the failure behavior more gradual, since if the first server is down, a process/thread will wait for the specified timeout before attempting a connection to a server further down the list. This defaults to False, for backward compatibility. -""")) +""" + ), +) user = cfg.StrOpt( 'user', - help=utils.fmt(""" + help=utils.fmt( + """ The user name of the administrator bind DN to use when querying the LDAP server, if your LDAP server requires it. -""")) +""" + ), +) password = cfg.StrOpt( 'password', secret=True, - help=utils.fmt(""" + help=utils.fmt( + """ The password of the administrator bind DN to use when querying the LDAP server, if your LDAP server requires it. -""")) +""" + ), +) suffix = cfg.StrOpt( 'suffix', default='cn=example,cn=com', - help=utils.fmt(""" + help=utils.fmt( + """ The default LDAP server suffix to use, if a DN is not defined via either `[ldap] user_tree_dn` or `[ldap] group_tree_dn`. 
-""")) +""" + ), +) query_scope = cfg.StrOpt( 'query_scope', default='one', choices=['one', 'sub'], - help=utils.fmt(""" + help=utils.fmt( + """ The search scope which defines how deep to search within the search base. A value of `one` (representing `oneLevel` or `singleLevel`) indicates a search of objects immediately below to the base object, but does not include the base object itself. A value of `sub` (representing `subtree` or `wholeSubtree`) indicates a search of both the base object itself and the entire subtree below it. -""")) +""" + ), +) page_size = cfg.IntOpt( 'page_size', default=0, min=0, - help=utils.fmt(""" + help=utils.fmt( + """ Defines the maximum number of results per page that keystone should request from the LDAP server when listing objects. A value of zero (`0`) disables paging. -""")) +""" + ), +) alias_dereferencing = cfg.StrOpt( 'alias_dereferencing', default='default', choices=['never', 'searching', 'always', 'finding', 'default'], - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP dereferencing option to use for queries involving aliases. A value of `default` falls back to using default dereferencing behavior configured by your `ldap.conf`. A value of `never` prevents aliases from being dereferenced at all. A value of `searching` dereferences aliases only after name resolution. A value of `finding` dereferences aliases only during name resolution. A value of `always` dereferences aliases in all cases. -""")) +""" + ), +) debug_level = cfg.IntOpt( 'debug_level', min=-1, - help=utils.fmt(""" + help=utils.fmt( + """ Sets the LDAP debugging level for LDAP calls. A value of 0 means that debugging is not enabled. This value is a bitmask, consult your LDAP documentation for possible values. -""")) +""" + ), +) chase_referrals = cfg.BoolOpt( 'chase_referrals', - help=utils.fmt(""" + help=utils.fmt( + """ Sets keystone's referral chasing behavior across directory partitions. If left unset, the system's default behavior will be used. 
-""")) +""" + ), +) user_tree_dn = cfg.StrOpt( 'user_tree_dn', - help=utils.fmt(""" + help=utils.fmt( + """ The search base to use for users. Defaults to `ou=Users` with the `[ldap] suffix` appended to it. -""")) +""" + ), +) user_filter = cfg.StrOpt( 'user_filter', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP search filter to use for users. -""")) +""" + ), +) user_objectclass = cfg.StrOpt( 'user_objectclass', default='inetOrgPerson', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP object class to use for users. -""")) +""" + ), +) user_id_attribute = cfg.StrOpt( 'user_id_attribute', default='cn', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to user IDs in keystone. This must NOT be a multivalued attribute. User IDs are expected to be globally unique across keystone domains and URL-safe. -""")) +""" + ), +) user_name_attribute = cfg.StrOpt( 'user_name_attribute', default='sn', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to user names in keystone. User names are expected to be unique only within a keystone domain and are not expected to be URL-safe. -""")) +""" + ), +) user_description_attribute = cfg.StrOpt( 'user_description_attribute', default='description', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to user descriptions in keystone. -""")) +""" + ), +) user_mail_attribute = cfg.StrOpt( 'user_mail_attribute', default='mail', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to user emails in keystone. -""")) +""" + ), +) user_pass_attribute = cfg.StrOpt( 'user_pass_attribute', default='userPassword', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to user passwords in keystone. -""")) +""" + ), +) user_enabled_attribute = cfg.StrOpt( 'user_enabled_attribute', default='enabled', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to the user enabled attribute in keystone. 
If setting this option to `userAccountControl`, then you may be interested in setting `[ldap] user_enabled_mask` and `[ldap] user_enabled_default` as well. -""")) +""" + ), +) user_enabled_invert = cfg.BoolOpt( 'user_enabled_invert', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ Logically negate the boolean value of the enabled attribute obtained from the LDAP server. Some LDAP servers use a boolean lock attribute where "true" means an account is disabled. Setting `[ldap] user_enabled_invert = true` will allow these lock attributes to be used. This option will have no effect if either the `[ldap] user_enabled_mask` or `[ldap] user_enabled_emulation` options are in use. -""")) +""" + ), +) user_enabled_mask = cfg.IntOpt( 'user_enabled_mask', default=0, min=0, - help=utils.fmt(""" + help=utils.fmt( + """ Bitmask integer to select which bit indicates the enabled value if the LDAP server represents "enabled" as a bit on an integer rather than as a discrete boolean. A value of `0` indicates that the mask is not used. If this is not set to `0` the typical value is `2`. This is typically used when `[ldap] user_enabled_attribute = userAccountControl`. Setting this option causes keystone to ignore the value of `[ldap] user_enabled_invert`. -""")) +""" + ), +) user_enabled_default = cfg.StrOpt( 'user_enabled_default', default='True', - help=utils.fmt(""" + help=utils.fmt( + """ The default value to enable users. This should match an appropriate integer value if the LDAP server uses non-boolean (bitmask) values to indicate if a user is enabled or disabled. If this is not set to `True`, then the typical value is `512`. This is typically used when `[ldap] user_enabled_attribute = userAccountControl`. 
-""")) +""" + ), +) user_attribute_ignore = cfg.ListOpt( 'user_attribute_ignore', default=['default_project_id'], - help=utils.fmt(""" + help=utils.fmt( + """ List of user attributes to ignore on create and update, or whether a specific user attribute should be filtered for list or show user. -""")) +""" + ), +) user_default_project_id_attribute = cfg.StrOpt( 'user_default_project_id_attribute', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to a user's default_project_id in keystone. This is most commonly used when keystone has write access to LDAP. -""")) +""" + ), +) user_enabled_emulation = cfg.BoolOpt( 'user_enabled_emulation', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ If enabled, keystone uses an alternative method to determine if a user is enabled or not by checking if they are a member of the group defined by the `[ldap] user_enabled_emulation_dn` option. Enabling this option causes keystone to ignore the value of `[ldap] user_enabled_invert`. -""")) +""" + ), +) user_enabled_emulation_dn = cfg.StrOpt( 'user_enabled_emulation_dn', - help=utils.fmt(""" + help=utils.fmt( + """ DN of the group entry to hold enabled users when using enabled emulation. Setting this option has no effect unless `[ldap] user_enabled_emulation` is also enabled. -""")) +""" + ), +) user_enabled_emulation_use_group_config = cfg.BoolOpt( 'user_enabled_emulation_use_group_config', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ Use the `[ldap] group_member_attribute` and `[ldap] group_objectclass` settings to determine membership in the emulated enabled group. Enabling this option has no effect unless `[ldap] user_enabled_emulation` is also enabled. -""")) +""" + ), +) user_additional_attribute_mapping = cfg.ListOpt( 'user_additional_attribute_mapping', default=[], - help=utils.fmt(""" + help=utils.fmt( + """ A list of LDAP attribute to keystone user attribute pairs used for mapping additional attributes to users in keystone. 
The expected format is `:`, where `ldap_attr` is the attribute in the LDAP object and `user_attr` is the attribute which should appear in the identity API. -""")) +""" + ), +) group_tree_dn = cfg.StrOpt( 'group_tree_dn', - help=utils.fmt(""" + help=utils.fmt( + """ The search base to use for groups. Defaults to `ou=UserGroups` with the `[ldap] suffix` appended to it. -""")) +""" + ), +) group_filter = cfg.StrOpt( 'group_filter', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP search filter to use for groups. -""")) +""" + ), +) group_objectclass = cfg.StrOpt( 'group_objectclass', default='groupOfNames', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP object class to use for groups. If setting this option to `posixGroup`, you may also be interested in enabling the `[ldap] group_members_are_ids` option. -""")) +""" + ), +) group_id_attribute = cfg.StrOpt( 'group_id_attribute', default='cn', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to group IDs in keystone. This must NOT be a multivalued attribute. Group IDs are expected to be globally unique across keystone domains and URL-safe. -""")) +""" + ), +) group_name_attribute = cfg.StrOpt( 'group_name_attribute', default='ou', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to group names in keystone. Group names are expected to be unique only within a keystone domain and are not expected to be URL-safe. -""")) +""" + ), +) group_member_attribute = cfg.StrOpt( 'group_member_attribute', default='member', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute used to indicate that a user is a member of the group. -""")) +""" + ), +) group_members_are_ids = cfg.BoolOpt( 'group_members_are_ids', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ Enable this option if the members of the group object class are keystone user IDs rather than LDAP DNs. This is the case when using `posixGroup` as the group object class in Open Directory. 
-""")) +""" + ), +) group_desc_attribute = cfg.StrOpt( 'group_desc_attribute', default='description', - help=utils.fmt(""" + help=utils.fmt( + """ The LDAP attribute mapped to group descriptions in keystone. -""")) +""" + ), +) group_attribute_ignore = cfg.ListOpt( 'group_attribute_ignore', default=[], - help=utils.fmt(""" + help=utils.fmt( + """ List of group attributes to ignore on create and update. or whether a specific group attribute should be filtered for list or show group. -""")) +""" + ), +) group_additional_attribute_mapping = cfg.ListOpt( 'group_additional_attribute_mapping', default=[], - help=utils.fmt(""" + help=utils.fmt( + """ A list of LDAP attribute to keystone group attribute pairs used for mapping additional attributes to groups in keystone. The expected format is `:`, where `ldap_attr` is the attribute in the LDAP object and `group_attr` is the attribute which should appear in the identity API. -""")) +""" + ), +) group_ad_nesting = cfg.BoolOpt( 'group_ad_nesting', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ If enabled, group queries will use Active Directory specific filters for nested groups. -""")) +""" + ), +) tls_cacertfile = cfg.StrOpt( 'tls_cacertfile', - help=utils.fmt(""" + help=utils.fmt( + """ An absolute path to a CA certificate file to use when communicating with LDAP servers. This option will take precedence over `[ldap] tls_cacertdir`, so there is no reason to set both. -""")) +""" + ), +) tls_cacertdir = cfg.StrOpt( 'tls_cacertdir', - help=utils.fmt(""" + help=utils.fmt( + """ An absolute path to a CA certificate directory to use when communicating with LDAP servers. There is no reason to set this option if you've also set `[ldap] tls_cacertfile`. -""")) +""" + ), +) use_tls = cfg.BoolOpt( 'use_tls', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ Enable TLS when communicating with LDAP servers. You should also set the `[ldap] tls_cacertfile` and `[ldap] tls_cacertdir` options when using this option. 
Do not set this option if you are using LDAP over SSL (LDAPS) instead of TLS. -""")) +""" + ), +) tls_req_cert = cfg.StrOpt( 'tls_req_cert', default='demand', choices=['demand', 'never', 'allow'], - help=utils.fmt(""" + help=utils.fmt( + """ Specifies which checks to perform against client certificates on incoming TLS sessions. If set to `demand`, then a certificate will always be requested and required from the LDAP server. If set to `allow`, then a certificate will always be requested but not required from the LDAP server. If set to `never`, then a certificate will never be requested. -""")) +""" + ), +) connection_timeout = cfg.IntOpt( 'connection_timeout', default=-1, min=-1, - help=utils.fmt(""" + help=utils.fmt( + """ The connection timeout to use with the LDAP server. A value of `-1` means that connections will never timeout. -""")) +""" + ), +) use_pool = cfg.BoolOpt( 'use_pool', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Enable LDAP connection pooling for queries to the LDAP server. There is typically no reason to disable this. -""")) +""" + ), +) pool_size = cfg.IntOpt( 'pool_size', default=10, min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The size of the LDAP connection pool. This option has no effect unless `[ldap] use_pool` is also enabled. -""")) +""" + ), +) pool_retry_max = cfg.IntOpt( 'pool_retry_max', default=3, min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The maximum number of times to attempt connecting to the LDAP server before aborting. A value of one makes only one connection attempt. This option has no effect unless `[ldap] use_pool` is also enabled. -""")) +""" + ), +) pool_retry_delay = cfg.FloatOpt( 'pool_retry_delay', default=0.1, - help=utils.fmt(""" + help=utils.fmt( + """ The number of seconds to wait before attempting to reconnect to the LDAP server. This option has no effect unless `[ldap] use_pool` is also enabled. 
-""")) +""" + ), +) pool_connection_timeout = cfg.IntOpt( 'pool_connection_timeout', default=-1, min=-1, - help=utils.fmt(""" + help=utils.fmt( + """ The connection timeout to use when pooling LDAP connections. A value of `-1` means that connections will never timeout. This option has no effect unless `[ldap] use_pool` is also enabled. -""")) +""" + ), +) pool_connection_lifetime = cfg.IntOpt( 'pool_connection_lifetime', default=600, min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The maximum connection lifetime to the LDAP server in seconds. When this lifetime is exceeded, the connection will be unbound and removed from the connection pool. This option has no effect unless `[ldap] use_pool` is also enabled. -""")) +""" + ), +) use_auth_pool = cfg.BoolOpt( 'use_auth_pool', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Enable LDAP connection pooling for end user authentication. There is typically no reason to disable this. -""")) +""" + ), +) auth_pool_size = cfg.IntOpt( 'auth_pool_size', default=100, min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The size of the connection pool to use for end user authentication. This option has no effect unless `[ldap] use_auth_pool` is also enabled. -""")) +""" + ), +) auth_pool_connection_lifetime = cfg.IntOpt( 'auth_pool_connection_lifetime', default=60, min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The maximum end user authentication connection lifetime to the LDAP server in seconds. When this lifetime is exceeded, the connection will be unbound and removed from the connection pool. This option has no effect unless `[ldap] use_auth_pool` is also enabled. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/oauth1.py b/keystone/conf/oauth1.py index 2e1b9f2af9..9bec96db93 100644 --- a/keystone/conf/oauth1.py +++ b/keystone/conf/oauth1.py @@ -18,32 +18,41 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the OAuth backend driver in the `keystone.oauth1` namespace. Typically, there is no reason to set this option unless you are providing a custom entry point. -""")) +""" + ), +) request_token_duration = cfg.IntOpt( 'request_token_duration', min=0, default=28800, - help=utils.fmt(""" + help=utils.fmt( + """ Number of seconds for the OAuth Request Token to remain valid after being created. This is the amount of time the user has to authorize the token. Setting this option to zero means that request tokens will last forever. -""")) +""" + ), +) access_token_duration = cfg.IntOpt( 'access_token_duration', min=0, default=86400, - help=utils.fmt(""" + help=utils.fmt( + """ Number of seconds for the OAuth Access Token to remain valid after being created. This is the amount of time the consumer has to interact with the service provider (which is typically keystone). Setting this option to zero means that access tokens will last forever. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/oauth2.py b/keystone/conf/oauth2.py index dbe26cf594..e07ab23adb 100644 --- a/keystone/conf/oauth2.py +++ b/keystone/conf/oauth2.py @@ -19,29 +19,32 @@ from keystone.conf import utils oauth2_authn_methods = cfg.ListOpt( 'oauth2_authn_methods', default=['tls_client_auth', 'client_secret_basic'], - help=utils.fmt(""" + help=utils.fmt( + """ The OAuth2.0 authentication method supported by the system when user obtains an access token through the OAuth2.0 token endpoint. This option can be set to certificate or secret. If the option is not set, the default value is certificate. 
When the option is set to secret, the OAuth2.0 token endpoint uses client_secret_basic method for authentication, otherwise tls_client_auth method is used for authentication. -""")) +""" + ), +) oauth2_cert_dn_mapping_id = cfg.StrOpt( 'oauth2_cert_dn_mapping_id', default='oauth2_mapping', - help=utils.fmt(""" + help=utils.fmt( + """ Used to define the mapping rule id. When not set, the mapping rule id is oauth2_mapping. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] -ALL_OPTS = [ - oauth2_authn_methods, - oauth2_cert_dn_mapping_id -] +ALL_OPTS = [oauth2_authn_methods, oauth2_cert_dn_mapping_id] def register_opts(conf): diff --git a/keystone/conf/opts.py b/keystone/conf/opts.py index f23b141840..5ff4d4de1f 100644 --- a/keystone/conf/opts.py +++ b/keystone/conf/opts.py @@ -71,9 +71,9 @@ def _import_modules(module_names): if not hasattr(module, LIST_OPTS_FUNC_NAME): raise Exception( "The module '%s' should have a '%s' function which " - "returns the config options." % ( - full_module_path, - LIST_OPTS_FUNC_NAME)) + "returns the config options." + % (full_module_path, LIST_OPTS_FUNC_NAME) + ) else: imported_modules.append(module) return imported_modules diff --git a/keystone/conf/policy.py b/keystone/conf/policy.py index e9dca0b866..b1cfa98e10 100644 --- a/keystone/conf/policy.py +++ b/keystone/conf/policy.py @@ -18,18 +18,24 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the policy backend driver in the `keystone.policy` namespace. Supplied drivers are `rules` (which does not support any CRUD operations for the v3 policy API) and `sql`. Typically, there is no reason to set this option unless you are providing a custom entry point. -""")) +""" + ), +) list_limit = cfg.IntOpt( 'list_limit', - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of entities that will be returned in a policy collection. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/receipt.py b/keystone/conf/receipt.py index e8d0357e91..f295d174c3 100644 --- a/keystone/conf/receipt.py +++ b/keystone/conf/receipt.py @@ -22,50 +22,65 @@ expiration = cfg.IntOpt( default=300, min=0, max=86400, - help=utils.fmt(""" + help=utils.fmt( + """ The amount of time that a receipt should remain valid (in seconds). This value should always be very short, as it represents how long a user has to reattempt auth with the missing auth methods. -""")) +""" + ), +) provider = cfg.StrOpt( 'provider', default='fernet', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the receipt provider in the `keystone.receipt.provider` namespace. The receipt provider controls the receipt construction and validation operations. Keystone includes just the `fernet` receipt provider for now. `fernet` receipts do not need to be persisted at all, but require that you run `keystone-manage fernet_setup` (also see the `keystone-manage fernet_rotate` command). -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for caching receipt creation and validation data. This has no effect unless global caching is enabled, or if cache_on_issue is disabled as we only cache receipts on issue. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', default=300, min=0, - help=utils.fmt(""" + help=utils.fmt( + """ The number of seconds to cache receipt creation and validation data. This has no effect unless both global and `[receipt] caching` are enabled. -""")) +""" + ), +) cache_on_issue = cfg.BoolOpt( 'cache_on_issue', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Enable storing issued receipt data to receipt validation cache so that first receipt validation doesn't actually cause full validation cycle. This option has no effect unless global caching and receipt caching are enabled. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/resource.py b/keystone/conf/resource.py index afed1c3b5a..b2d1f91a6f 100644 --- a/keystone/conf/resource.py +++ b/keystone/conf/resource.py @@ -18,47 +18,63 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the resource driver in the `keystone.resource` namespace. Only a `sql` driver is supplied by keystone. Unless you are writing proprietary drivers for keystone, you do not need to set this option. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, deprecated_opts=[cfg.DeprecatedOpt('caching', group='assignment')], - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for resource caching. This has no effect unless global caching is enabled. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', deprecated_opts=[cfg.DeprecatedOpt('cache_time', group='assignment')], - help=utils.fmt(""" + help=utils.fmt( + """ Time to cache resource data in seconds. This has no effect unless global caching is enabled. -""")) +""" + ), +) list_limit = cfg.IntOpt( 'list_limit', deprecated_opts=[cfg.DeprecatedOpt('list_limit', group='assignment')], - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of entities that will be returned in a resource collection. -""")) +""" + ), +) admin_project_domain_name = cfg.StrOpt( 'admin_project_domain_name', - help=utils.fmt(""" + help=utils.fmt( + """ Name of the domain that owns the `admin_project_name`. If left unset, then there is no admin project. `[resource] admin_project_name` must also be set to use this option. -""")) +""" + ), +) admin_project_name = cfg.StrOpt( 'admin_project_name', - help=utils.fmt(""" + help=utils.fmt( + """ This is a special project which represents cloud-level administrator privileges across services. 
Tokens scoped to this project will contain a true `is_admin_project` attribute to indicate to policy systems that the role @@ -66,31 +82,39 @@ assignments on that specific project should apply equally across every project. If left unset, then there is no admin project, and thus no explicit means of cross-project role assignments. `[resource] admin_project_domain_name` must also be set to use this option. -""")) +""" + ), +) project_name_url_safe = cfg.StrOpt( 'project_name_url_safe', choices=['off', 'new', 'strict'], default='off', - help=utils.fmt(""" + help=utils.fmt( + """ This controls whether the names of projects are restricted from containing URL-reserved characters. If set to `new`, attempts to create or update a project with a URL-unsafe name will fail. If set to `strict`, attempts to scope a token with a URL-unsafe project name will fail, thereby forcing all project names to be updated to be URL-safe. -""")) +""" + ), +) domain_name_url_safe = cfg.StrOpt( 'domain_name_url_safe', choices=['off', 'new', 'strict'], default='off', - help=utils.fmt(""" + help=utils.fmt( + """ This controls whether the names of domains are restricted from containing URL-reserved characters. If set to `new`, attempts to create or update a domain with a URL-unsafe name will fail. If set to `strict`, attempts to scope a token with a URL-unsafe domain name will fail, thereby forcing all domain names to be updated to be URL-safe. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/revoke.py b/keystone/conf/revoke.py index 7f91b51e86..6009d5f34f 100644 --- a/keystone/conf/revoke.py +++ b/keystone/conf/revoke.py @@ -18,38 +18,51 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the token revocation backend driver in the `keystone.revoke` namespace. 
Keystone only provides a `sql` driver, so there is no reason to set this option unless you are providing a custom entry point. -""")) +""" + ), +) expiration_buffer = cfg.IntOpt( 'expiration_buffer', default=1800, min=0, - help=utils.fmt(""" + help=utils.fmt( + """ The number of seconds after a token has expired before a corresponding revocation event may be purged from the backend. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for revocation event caching. This has no effect unless global caching is enabled. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', default=3600, deprecated_opts=[ - cfg.DeprecatedOpt('revocation_cache_time', group='token')], - help=utils.fmt(""" + cfg.DeprecatedOpt('revocation_cache_time', group='token') + ], + help=utils.fmt( + """ Time to cache the revocation list and the revocation events (in seconds). This has no effect unless global and `[revoke] caching` are both enabled. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/role.py b/keystone/conf/role.py index 569a0ec4e1..ef728d66b0 100644 --- a/keystone/conf/role.py +++ b/keystone/conf/role.py @@ -19,34 +19,46 @@ from keystone.conf import utils # driver is not specified, the assignment driver chooses the backend. driver = cfg.StrOpt( 'driver', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the role backend driver in the `keystone.role` namespace. Keystone only provides a `sql` driver, so there's no reason to change this unless you are providing a custom entry point. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for role caching. This has no effect unless global caching is enabled. In a typical deployment, there is no reason to disable this. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', - help=utils.fmt(""" + help=utils.fmt( + """ Time to cache role data, in seconds. 
This has no effect unless both global caching and `[role] caching` are enabled. -""")) +""" + ), +) list_limit = cfg.IntOpt( 'list_limit', - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of entities that will be returned in a role collection. This may be useful to tune if you have a large number of discrete roles in your deployment. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/saml.py b/keystone/conf/saml.py index f61a676647..1f92c3ba44 100644 --- a/keystone/conf/saml.py +++ b/keystone/conf/saml.py @@ -19,147 +19,201 @@ from keystone.conf import utils assertion_expiration_time = cfg.IntOpt( 'assertion_expiration_time', default=3600, - help=utils.fmt(""" + help=utils.fmt( + """ Determines the lifetime for any SAML assertions generated by keystone, using `NotOnOrAfter` attributes. -""")) +""" + ), +) xmlsec1_binary = cfg.StrOpt( 'xmlsec1_binary', default='xmlsec1', - help=utils.fmt(""" + help=utils.fmt( + """ Name of, or absolute path to, the binary to be used for XML signing. Although only the XML Security Library (`xmlsec1`) is supported, it may have a non-standard name or path on your system. If keystone cannot find the binary itself, you may need to install the appropriate package, use this option to specify an absolute path, or adjust keystone's PATH environment variable. -""")) +""" + ), +) certfile = cfg.StrOpt( 'certfile', default=constants._CERTFILE, - help=utils.fmt(""" + help=utils.fmt( + """ Absolute path to the public certificate file to use for SAML signing. The value cannot contain a comma (`,`). -""")) +""" + ), +) keyfile = cfg.StrOpt( 'keyfile', default=constants._KEYFILE, - help=utils.fmt(""" + help=utils.fmt( + """ Absolute path to the private key file to use for SAML signing. The value cannot contain a comma (`,`). 
-""")) +""" + ), +) idp_entity_id = cfg.URIOpt( 'idp_entity_id', max_length=1024, - help=utils.fmt(""" + help=utils.fmt( + """ This is the unique entity identifier of the identity provider (keystone) to use when generating SAML assertions. This value is required to generate identity provider metadata and must be a URI (a URL is recommended). For example: `https://keystone.example.com/v3/OS-FEDERATION/saml2/idp`. -""")) +""" + ), +) idp_sso_endpoint = cfg.URIOpt( 'idp_sso_endpoint', - help=utils.fmt(""" + help=utils.fmt( + """ This is the single sign-on (SSO) service location of the identity provider which accepts HTTP POST requests. A value is required to generate identity provider metadata. For example: `https://keystone.example.com/v3/OS-FEDERATION/saml2/sso`. -""")) +""" + ), +) idp_lang = cfg.StrOpt( 'idp_lang', default='en', - help=utils.fmt(""" + help=utils.fmt( + """ This is the language used by the identity provider's organization. -""")) +""" + ), +) idp_organization_name = cfg.StrOpt( 'idp_organization_name', default='SAML Identity Provider', - help=utils.fmt(""" + help=utils.fmt( + """ This is the name of the identity provider's organization. -""")) +""" + ), +) idp_organization_display_name = cfg.StrOpt( 'idp_organization_display_name', default='OpenStack SAML Identity Provider', - help=utils.fmt(""" + help=utils.fmt( + """ This is the name of the identity provider's organization to be displayed. -""")) +""" + ), +) idp_organization_url = cfg.URIOpt( 'idp_organization_url', default='https://example.com/', - help=utils.fmt(""" + help=utils.fmt( + """ This is the URL of the identity provider's organization. The URL referenced here should be useful to humans. -""")) +""" + ), +) idp_contact_company = cfg.StrOpt( 'idp_contact_company', default='Example, Inc.', - help=utils.fmt(""" + help=utils.fmt( + """ This is the company name of the identity provider's contact person. 
-""")) +""" + ), +) idp_contact_name = cfg.StrOpt( 'idp_contact_name', default='SAML Identity Provider Support', - help=utils.fmt(""" + help=utils.fmt( + """ This is the given name of the identity provider's contact person. -""")) +""" + ), +) idp_contact_surname = cfg.StrOpt( 'idp_contact_surname', default='Support', - help=utils.fmt(""" + help=utils.fmt( + """ This is the surname of the identity provider's contact person. -""")) +""" + ), +) idp_contact_email = cfg.StrOpt( 'idp_contact_email', default='support@example.com', - help=utils.fmt(""" + help=utils.fmt( + """ This is the email address of the identity provider's contact person. -""")) +""" + ), +) idp_contact_telephone = cfg.StrOpt( 'idp_contact_telephone', default='+1 800 555 0100', - help=utils.fmt(""" + help=utils.fmt( + """ This is the telephone number of the identity provider's contact person. -""")) +""" + ), +) idp_contact_type = cfg.StrOpt( 'idp_contact_type', default='other', choices=['technical', 'support', 'administrative', 'billing', 'other'], - help=utils.fmt(""" + help=utils.fmt( + """ This is the type of contact that best describes the identity provider's contact person. -""")) +""" + ), +) idp_metadata_path = cfg.StrOpt( 'idp_metadata_path', default='/etc/keystone/saml2_idp_metadata.xml', - help=utils.fmt(""" + help=utils.fmt( + """ Absolute path to the identity provider metadata file. This file should be generated with the `keystone-manage saml_idp_metadata` command. There is typically no reason to change this value. -""")) +""" + ), +) relay_state_prefix = cfg.StrOpt( 'relay_state_prefix', default='ss:mem:', - help=utils.fmt(""" + help=utils.fmt( + """ The prefix of the RelayState SAML attribute to use when generating enhanced client and proxy (ECP) assertions. In a typical deployment, there is no reason to change this value. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/security_compliance.py b/keystone/conf/security_compliance.py index 686a957d29..885377302c 100644 --- a/keystone/conf/security_compliance.py +++ b/keystone/conf/security_compliance.py @@ -18,19 +18,23 @@ from keystone.conf import utils disable_user_account_days_inactive = cfg.IntOpt( 'disable_user_account_days_inactive', min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The maximum number of days a user can go without authenticating before being considered "inactive" and automatically disabled (locked). This feature is disabled by default; set any value to enable it. This feature depends on the `sql` backend for the `[identity] driver`. When a user exceeds this threshold and is considered "inactive", the user's `enabled` attribute in the HTTP API may not match the value of the user's `enabled` column in the user table. -""")) +""" + ), +) lockout_failure_attempts = cfg.IntOpt( 'lockout_failure_attempts', min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The maximum number of times that a user can fail to authenticate before the user account is locked for the number of seconds specified by `[security_compliance] lockout_duration`. This feature is disabled by @@ -38,50 +42,62 @@ default. If this feature is enabled and `[security_compliance] lockout_duration` is not set, then users may be locked out indefinitely until the user is explicitly enabled via the API. This feature depends on the `sql` backend for the `[identity] driver`. -""")) +""" + ), +) lockout_duration = cfg.IntOpt( 'lockout_duration', default=1800, min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The number of seconds a user account will be locked when the maximum number of failed authentication attempts (as specified by `[security_compliance] lockout_failure_attempts`) is exceeded. Setting this option will have no effect unless you also set `[security_compliance] lockout_failure_attempts` to a non-zero value. 
This feature depends on the `sql` backend for the `[identity] driver`. -""")) +""" + ), +) password_expires_days = cfg.IntOpt( 'password_expires_days', min=1, - help=utils.fmt(""" + help=utils.fmt( + """ The number of days for which a password will be considered valid before requiring it to be changed. This feature is disabled by default. If enabled, new password changes will have an expiration date, however existing passwords would not be impacted. This feature depends on the `sql` backend for the `[identity] driver`. -""")) +""" + ), +) unique_last_password_count = cfg.IntOpt( 'unique_last_password_count', default=0, min=0, - help=utils.fmt(""" + help=utils.fmt( + """ This controls the number of previous user password iterations to keep in history, in order to enforce that newly created passwords are unique. The total number which includes the new password should not be greater or equal to this value. Setting the value to zero (the default) disables this feature. Thus, to enable this feature, values must be greater than 0. This feature depends on the `sql` backend for the `[identity] driver`. -""")) +""" + ), +) minimum_password_age = cfg.IntOpt( 'minimum_password_age', default=0, min=0, - help=utils.fmt(""" + help=utils.fmt( + """ The number of days that a password must be used before the user can change it. This prevents users from changing their passwords immediately in order to wipe out their password history and reuse an old password. This feature does not @@ -90,31 +106,40 @@ default and allows for immediate password changes. This feature depends on the `sql` backend for the `[identity] driver`. Note: If `[security_compliance] password_expires_days` is set, then the value for this option should be less than the `password_expires_days`. -""")) +""" + ), +) password_regex = cfg.StrOpt( 'password_regex', - help=utils.fmt(r""" + help=utils.fmt( + r""" The regular expression used to validate password strength requirements. 
By default, the regular expression will match any password. The following is an example of a pattern which requires at least 1 letter, 1 digit, and have a minimum length of 7 characters: ^(?=.*\\\d)(?=.*[a-zA-Z]).{7,}$ This feature depends on the `sql` backend for the `[identity] driver`. -""")) # noqa: W605 +""" + ), +) # noqa: W605 password_regex_description = cfg.StrOpt( 'password_regex_description', - help=utils.fmt(""" + help=utils.fmt( + """ Describe your password regular expression here in language for humans. If a password fails to match the regular expression, the contents of this configuration variable will be returned to users to explain why their requested password was insufficient. -""")) +""" + ), +) change_password_upon_first_use = cfg.BoolOpt( 'change_password_upon_first_use', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ Enabling this option requires users to change their password when the user is created, or upon administrative reset. Before accessing any services, affected users will have to change their password. To ignore this requirement for @@ -122,7 +147,9 @@ specific users, such as service users, set the `options` attribute `ignore_change_password_upon_first_use` to `True` for the desired user via the update user API. This feature is disabled by default. This feature is only applicable with the `sql` backend for the `[identity] driver`. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] @@ -135,7 +162,7 @@ ALL_OPTS = [ minimum_password_age, password_regex, password_regex_description, - change_password_upon_first_use + change_password_upon_first_use, ] diff --git a/keystone/conf/shadow_users.py b/keystone/conf/shadow_users.py index da1636ff65..5b27e38c60 100644 --- a/keystone/conf/shadow_users.py +++ b/keystone/conf/shadow_users.py @@ -18,13 +18,16 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the shadow users backend driver in the `keystone.identity.shadow_users` namespace. This driver is used for persisting local user references to externally-managed identities (via federation, LDAP, etc). Keystone only provides a `sql` driver, so there is no reason to change this option unless you are providing a custom entry point. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/token.py b/keystone/conf/token.py index 5cf9d717b6..33c343a054 100644 --- a/keystone/conf/token.py +++ b/keystone/conf/token.py @@ -22,7 +22,8 @@ expiration = cfg.IntOpt( default=3600, min=0, max=sys.maxsize, - help=utils.fmt(""" + help=utils.fmt( + """ The amount of time that a token should remain valid (in seconds). Drastically reducing this value may break "long-running" operations that involve multiple services to coordinate together, and will force users to authenticate with @@ -30,12 +31,15 @@ keystone more frequently. Drastically increasing this value will increase the number of tokens that will be simultaneously valid. Keystone tokens are also bearer tokens, so a shorter duration will also reduce the potential security impact of a compromised token. -""")) +""" + ), +) provider = cfg.StrOpt( 'provider', default='fernet', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the token provider in the `keystone.token.provider` namespace. 
The token provider controls the token construction, validation, and revocation operations. Supported upstream providers are `fernet` and `jws`. Neither @@ -47,76 +51,98 @@ validating token, which can be done with `keystone-manage create_jws_keypair`. Note that `fernet` tokens are encrypted and `jws` tokens are only signed. Please be sure to consider this if your deployment has security requirements regarding payload contents used to generate token IDs. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for caching token creation and validation data. This has no effect unless global caching is enabled. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', min=0, max=sys.maxsize, - help=utils.fmt(""" + help=utils.fmt( + """ The number of seconds to cache token creation and validation data. This has no effect unless both global and `[token] caching` are enabled. -""")) +""" + ), +) revoke_by_id = cfg.BoolOpt( 'revoke_by_id', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ This toggles support for revoking individual tokens by the token identifier and thus various token enumeration operations (such as listing all tokens issued to a specific user). These operations are used to determine the list of tokens to consider revoked. Do not disable this option if you're using the `kvs` `[revoke] driver`. -""")) +""" + ), +) allow_rescope_scoped_token = cfg.BoolOpt( 'allow_rescope_scoped_token', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ This toggles whether scoped tokens may be re-scoped to a new project or domain, thereby preventing users from exchanging a scoped token (including those with a default project scope) for any other token. This forces users to either authenticate for unscoped tokens (and later exchange that unscoped token for tokens with a more specific scope) or to provide their credentials in every request for a scoped token to avoid re-scoping altogether. 
-""")) +""" + ), +) cache_on_issue = cfg.BoolOpt( 'cache_on_issue', default=True, deprecated_since=versionutils.deprecated.STEIN, - deprecated_reason=utils.fmt(""" + deprecated_reason=utils.fmt( + """ Keystone already exposes a configuration option for caching tokens. Having a separate configuration option to cache tokens when they are issued is redundant, unnecessarily complicated, and is misleading if token caching is disabled because tokens will still be pre-cached by default when they are issued. The ability to pre-cache tokens when they are issued is going to rely exclusively on the ``keystone.conf [token] caching`` option in the future. -"""), +""" + ), deprecated_for_removal=True, - help=utils.fmt(""" + help=utils.fmt( + """ Enable storing issued token data to token validation cache so that first token validation doesn't actually cause full validation cycle. This option has no effect unless global caching is enabled and will still cache tokens even if `[token] caching = False`. -""")) +""" + ), +) allow_expired_window = cfg.IntOpt( 'allow_expired_window', default=48 * 60 * 60, - help=utils.fmt(""" + help=utils.fmt( + """ This controls the number of seconds that a token can be retrieved for beyond the built-in expiry time. This allows long running operations to succeed. Defaults to two days. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/tokenless_auth.py b/keystone/conf/tokenless_auth.py index 6ca5c67aee..4c7723b88e 100644 --- a/keystone/conf/tokenless_auth.py +++ b/keystone/conf/tokenless_auth.py @@ -18,7 +18,8 @@ from keystone.conf import utils trusted_issuer = cfg.MultiStrOpt( 'trusted_issuer', default=[], - help=utils.fmt(""" + help=utils.fmt( + """ The list of distinguished names which identify trusted issuers of client certificates allowed to use X.509 tokenless authorization. If the option is absent then no certificates will be allowed. 
The format for the values of a @@ -28,28 +29,36 @@ option may be repeated multiple times to represent multiple values. For example, keystone.conf would include two consecutive lines in order to trust two different DNs, such as `trusted_issuer = CN=john,OU=keystone,O=openstack` and `trusted_issuer = CN=mary,OU=eng,O=abc`. -""")) +""" + ), +) protocol = cfg.StrOpt( 'protocol', default='x509', - help=utils.fmt(""" + help=utils.fmt( + """ The federated protocol ID used to represent X.509 tokenless authorization. This is used in combination with the value of `[tokenless_auth] issuer_attribute` to find a corresponding federated mapping. In a typical deployment, there is no reason to change this value. -""")) +""" + ), +) issuer_attribute = cfg.StrOpt( 'issuer_attribute', default='SSL_CLIENT_I_DN', - help=utils.fmt(""" + help=utils.fmt( + """ The name of the WSGI environment variable used to pass the issuer of the client certificate to keystone. This attribute is used as an identity provider ID for the X.509 tokenless authorization along with the protocol to look up its corresponding mapping. In a typical deployment, there is no reason to change this value. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/totp.py b/keystone/conf/totp.py index d588433ec6..668a29d9eb 100644 --- a/keystone/conf/totp.py +++ b/keystone/conf/totp.py @@ -19,9 +19,12 @@ included_previous_windows = cfg.IntOpt( default=1, min=0, max=10, - help=utils.fmt(""" + help=utils.fmt( + """ The number of previous windows to check when processing TOTP passcodes. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/trust.py b/keystone/conf/trust.py index a382a49212..ed00f9b6e0 100644 --- a/keystone/conf/trust.py +++ b/keystone/conf/trust.py @@ -18,29 +18,38 @@ from keystone.conf import utils allow_redelegation = cfg.BoolOpt( 'allow_redelegation', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ Allows authorization to be redelegated from one user to another, effectively chaining trusts together. When disabled, the `remaining_uses` attribute of a trust is constrained to be zero. -""")) +""" + ), +) max_redelegation_count = cfg.IntOpt( 'max_redelegation_count', default=3, - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of times that authorization can be redelegated from one user to another in a chain of trusts. This number may be reduced further for a specific trust. -""")) +""" + ), +) driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the trust backend driver in the `keystone.trust` namespace. Keystone only provides a `sql` driver, so there is no reason to change this unless you are providing a custom entry point. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/unified_limit.py b/keystone/conf/unified_limit.py index fbbbd99ba8..6a662c4824 100644 --- a/keystone/conf/unified_limit.py +++ b/keystone/conf/unified_limit.py @@ -18,45 +18,60 @@ from keystone.conf import utils driver = cfg.StrOpt( 'driver', default='sql', - help=utils.fmt(""" + help=utils.fmt( + """ Entry point for the unified limit backend driver in the `keystone.unified_limit` namespace. Keystone only provides a `sql` driver, so there's no reason to change this unless you are providing a custom entry point. -""")) +""" + ), +) caching = cfg.BoolOpt( 'caching', default=True, - help=utils.fmt(""" + help=utils.fmt( + """ Toggle for unified limit caching. This has no effect unless global caching is enabled. 
In a typical deployment, there is no reason to disable this. -""")) +""" + ), +) cache_time = cfg.IntOpt( 'cache_time', - help=utils.fmt(""" + help=utils.fmt( + """ Time to cache unified limit data, in seconds. This has no effect unless both global caching and `[unified_limit] caching` are enabled. -""")) +""" + ), +) list_limit = cfg.IntOpt( 'list_limit', - help=utils.fmt(""" + help=utils.fmt( + """ Maximum number of entities that will be returned in a unified limit collection. This may be useful to tune if you have a large number of unified limits in your deployment. -""")) +""" + ), +) enforcement_model = cfg.StrOpt( 'enforcement_model', default='flat', choices=['flat', 'strict_two_level'], - help=utils.fmt(""" + help=utils.fmt( + """ The enforcement model to use when validating limits associated to projects. Enforcement models will behave differently depending on the existing limits, which may result in backwards incompatible changes if a model is switched in a running deployment. -""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] diff --git a/keystone/conf/wsgi.py b/keystone/conf/wsgi.py index a0e799fb37..1c9f52b48e 100644 --- a/keystone/conf/wsgi.py +++ b/keystone/conf/wsgi.py @@ -18,7 +18,8 @@ from keystone.conf import utils debug_middleware = cfg.BoolOpt( 'debug_middleware', default=False, - help=utils.fmt(""" + help=utils.fmt( + """ If set to true, this enables the oslo debug middleware in Keystone. This Middleware prints a lot of information about the request and the response. It is useful for getting information about the data on the wire (decoded) and @@ -34,7 +35,9 @@ and will show the data closest to the wire. WARNING: NOT INTENDED FOR USE IN PRODUCTION. THIS MIDDLEWARE CAN AND WILL EMIT SENSITIVE/PRIVILEGED DATA. 
-""")) +""" + ), +) GROUP_NAME = __name__.split('.')[-1] ALL_OPTS = [ diff --git a/keystone/credential/backends/base.py b/keystone/credential/backends/base.py index ac16058c7b..83ac492ac6 100644 --- a/keystone/credential/backends/base.py +++ b/keystone/credential/backends/base.py @@ -113,5 +113,6 @@ class CredentialDriverBase(object, metaclass=abc.ABCMeta): try: self.credential_api.delete_credential(cr['id']) except exception.CredentialNotFound: - LOG.debug('Deletion of credential is not required: %s', - cr['id']) + LOG.debug( + 'Deletion of credential is not required: %s', cr['id'] + ) diff --git a/keystone/credential/backends/sql.py b/keystone/credential/backends/sql.py index 36a3a3d27b..b23eec401d 100644 --- a/keystone/credential/backends/sql.py +++ b/keystone/credential/backends/sql.py @@ -24,11 +24,15 @@ from keystone import exception class CredentialModel(sql.ModelBase, sql.ModelDictMixinWithExtras): __tablename__ = 'credential' attributes = [ - 'id', 'user_id', 'project_id', 'encrypted_blob', 'type', 'key_hash' + 'id', + 'user_id', + 'project_id', + 'encrypted_blob', + 'type', + 'key_hash', ] id = sql.Column(sql.String(64), primary_key=True) - user_id = sql.Column(sql.String(64), - nullable=False) + user_id = sql.Column(sql.String(64), nullable=False) project_id = sql.Column(sql.String(64)) _encrypted_blob = sql.Column('encrypted_blob', sql.Text(), nullable=False) type = sql.Column(sql.String(255), nullable=False) @@ -64,8 +68,9 @@ class Credential(base.CredentialDriverBase): def list_credentials(self, hints): with sql.session_for_read() as session: credentials = session.query(CredentialModel) - credentials = sql.filter_limit_query(CredentialModel, - credentials, hints) + credentials = sql.filter_limit_query( + CredentialModel, credentials, hints + ) return [s.to_dict() for s in credentials] def list_credentials_for_user(self, user_id, type=None): diff --git a/keystone/credential/core.py b/keystone/credential/core.py index 78551dcd17..be8b96199e 100644 --- 
a/keystone/credential/core.py +++ b/keystone/credential/core.py @@ -77,9 +77,7 @@ class Manager(manager.Manager): ) else: encrypted_blob, key_hash = ( - PROVIDERS.credential_provider_api.encrypt( - credential['blob'] - ) + PROVIDERS.credential_provider_api.encrypt(credential['blob']) ) credential_copy['encrypted_blob'] = encrypted_blob credential_copy['key_hash'] = key_hash @@ -91,8 +89,7 @@ class Manager(manager.Manager): if user_limit >= 0: cred_count = len(self.list_credentials_for_user(user_id)) if cred_count >= user_limit: - raise exception.CredentialLimitExceeded( - limit=user_limit) + raise exception.CredentialLimitExceeded(limit=user_limit) @manager.response_truncated def list_credentials(self, hints=None): @@ -123,30 +120,24 @@ class Manager(manager.Manager): def _get_credential(self, credential_id): return self.driver.get_credential(credential_id) - def create_credential(self, credential_id, credential, - initiator=None): + def create_credential(self, credential_id, credential, initiator=None): """Create a credential.""" credential_copy = self._encrypt_credential(credential) user_id = credential_copy['user_id'] self._assert_limit_not_exceeded(user_id) ref = self.driver.create_credential(credential_id, credential_copy) if MEMOIZE.should_cache(ref): - self._get_credential.set(ref, - credential_copy, - credential_id) - self._list_credentials_for_user.invalidate(self, - ref['user_id'], - ref['type']) - self._list_credentials_for_user.invalidate(self, - ref['user_id'], - None) + self._get_credential.set(ref, credential_copy, credential_id) + self._list_credentials_for_user.invalidate( + self, ref['user_id'], ref['type'] + ) + self._list_credentials_for_user.invalidate( + self, ref['user_id'], None + ) ref.pop('key_hash', None) ref.pop('encrypted_blob', None) ref['blob'] = credential['blob'] - notifications.Audit.created( - self._CRED, - credential_id, - initiator) + notifications.Audit.created(self._CRED, credential_id, initiator) return ref def 
_validate_credential_update(self, credential_id, credential): @@ -154,12 +145,14 @@ class Manager(manager.Manager): # update, check the case where a non-ec2 credential changes its type # to be "ec2", but has no associated "project_id", either in the # request or already set in the database - if (credential.get('type', '').lower() == 'ec2' and - not credential.get('project_id')): + if credential.get('type', '').lower() == 'ec2' and not credential.get( + 'project_id' + ): existing_cred = self.get_credential(credential_id) if not existing_cred['project_id']: - raise exception.ValidationError(attribute='project_id', - target='credential') + raise exception.ValidationError( + attribute='project_id', target='credential' + ) def update_credential(self, credential_id, credential): """Update an existing credential.""" @@ -173,12 +166,12 @@ class Manager(manager.Manager): ref = self.driver.update_credential(credential_id, credential_copy) if MEMOIZE.should_cache(ref): self._get_credential.set(ref, self, credential_id) - self._list_credentials_for_user.invalidate(self, - ref['user_id'], - ref['type']) - self._list_credentials_for_user.invalidate(self, - ref['user_id'], - None) + self._list_credentials_for_user.invalidate( + self, ref['user_id'], ref['type'] + ) + self._list_credentials_for_user.invalidate( + self, ref['user_id'], None + ) ref.pop('key_hash', None) ref.pop('encrypted_blob', None) # If the update request contains a `blob` attribute - we should return @@ -190,20 +183,16 @@ class Manager(manager.Manager): ref['blob'] = existing_blob return ref - def delete_credential(self, credential_id, - initiator=None): + def delete_credential(self, credential_id, initiator=None): """Delete a credential.""" cred = self.get_credential(credential_id) self.driver.delete_credential(credential_id) self._get_credential.invalidate(self, credential_id) - self._list_credentials_for_user.invalidate(self, - cred['user_id'], - cred['type']) - 
self._list_credentials_for_user.invalidate(self, - cred['user_id'], - None) - notifications.Audit.deleted( - self._CRED, credential_id, initiator) + self._list_credentials_for_user.invalidate( + self, cred['user_id'], cred['type'] + ) + self._list_credentials_for_user.invalidate(self, cred['user_id'], None) + notifications.Audit.deleted(self._CRED, credential_id, initiator) def delete_credentials_for_project(self, project_id): """Delete all credentials for a project.""" @@ -214,12 +203,12 @@ class Manager(manager.Manager): self.driver.delete_credentials_for_project(project_id) for cred in creds: self._get_credential.invalidate(self, cred['id']) - self._list_credentials_for_user.invalidate(self, - cred['user_id'], - cred['type']) - self._list_credentials_for_user.invalidate(self, - cred['user_id'], - None) + self._list_credentials_for_user.invalidate( + self, cred['user_id'], cred['type'] + ) + self._list_credentials_for_user.invalidate( + self, cred['user_id'], None + ) def delete_credentials_for_user(self, user_id): """Delete all credentials for a user.""" @@ -227,9 +216,9 @@ class Manager(manager.Manager): self.driver.delete_credentials_for_user(user_id) for cred in creds: self._get_credential.invalidate(self, cred['id']) - self._list_credentials_for_user.invalidate(self, - user_id, - cred['type']) - self._list_credentials_for_user.invalidate(self, - cred['user_id'], - None) + self._list_credentials_for_user.invalidate( + self, user_id, cred['type'] + ) + self._list_credentials_for_user.invalidate( + self, cred['user_id'], None + ) diff --git a/keystone/credential/providers/fernet/core.py b/keystone/credential/providers/fernet/core.py index 99de106fb2..6b356ab2b1 100644 --- a/keystone/credential/providers/fernet/core.py +++ b/keystone/credential/providers/fernet/core.py @@ -42,8 +42,8 @@ MAX_ACTIVE_KEYS = 3 def get_multi_fernet_keys(): key_utils = fernet_utils.FernetUtils( - CONF.credential.key_repository, MAX_ACTIVE_KEYS, - 'credential') + 
CONF.credential.key_repository, MAX_ACTIVE_KEYS, 'credential' + ) keys = key_utils.load_keys(use_null_key=True) fernet_keys = [fernet.Fernet(key) for key in keys] @@ -77,12 +77,14 @@ class Provider(core.Provider): 'Encrypting credentials with the null key. Please properly ' 'encrypt credentials using `keystone-manage credential_setup`,' ' `keystone-manage credential_migrate`, and `keystone-manage ' - 'credential_rotate`') + 'credential_rotate`' + ) try: return ( crypto.encrypt(credential.encode('utf-8')), - primary_key_hash(keys)) + primary_key_hash(keys), + ) except (TypeError, ValueError) as e: msg = 'Credential could not be encrypted: %s' % str(e) tr_msg = _('Credential could not be encrypted: %s') % str(e) @@ -96,7 +98,8 @@ class Provider(core.Provider): :returns: a decrypted credential """ key_utils = fernet_utils.FernetUtils( - CONF.credential.key_repository, MAX_ACTIVE_KEYS, 'credential') + CONF.credential.key_repository, MAX_ACTIVE_KEYS, 'credential' + ) keys = key_utils.load_keys(use_null_key=True) fernet_keys = [fernet.Fernet(key) for key in keys] crypto = fernet.MultiFernet(fernet_keys) @@ -106,9 +109,13 @@ class Provider(core.Provider): credential = credential.encode('utf-8') return crypto.decrypt(credential).decode('utf-8') except (fernet.InvalidToken, TypeError, ValueError): - msg = ('Credential could not be decrypted. Please contact the ' - 'administrator') - tr_msg = _('Credential could not be decrypted. Please contact the ' - 'administrator') + msg = ( + 'Credential could not be decrypted. Please contact the ' + 'administrator' + ) + tr_msg = _( + 'Credential could not be decrypted. 
Please contact the ' + 'administrator' + ) LOG.error(msg) raise exception.CredentialEncryptionError(tr_msg) diff --git a/keystone/credential/schema.py b/keystone/credential/schema.py index 749f0c0aff..a5cdb088df 100644 --- a/keystone/credential/schema.py +++ b/keystone/credential/schema.py @@ -12,18 +12,10 @@ _credential_properties = { - 'blob': { - 'type': 'string' - }, - 'project_id': { - 'type': 'string' - }, - 'type': { - 'type': 'string' - }, - 'user_id': { - 'type': 'string' - } + 'blob': {'type': 'string'}, + 'project_id': {'type': 'string'}, + 'type': {'type': 'string'}, + 'user_id': {'type': 'string'}, } credential_create = { @@ -34,29 +26,19 @@ credential_create = { { 'title': 'ec2 credential requires project_id', 'required': ['blob', 'type', 'user_id', 'project_id'], - 'properties': { - 'type': { - 'enum': ['ec2'] - } - } + 'properties': {'type': {'enum': ['ec2']}}, }, { 'title': 'non-ec2 credential does not require project_id', 'required': ['blob', 'type', 'user_id'], - 'properties': { - 'type': { - 'not': { - 'enum': ['ec2'] - } - } - } - } - ] + 'properties': {'type': {'not': {'enum': ['ec2']}}}, + }, + ], } credential_update = { 'type': 'object', 'properties': _credential_properties, 'minProperties': 1, - 'additionalProperties': True + 'additionalProperties': True, } diff --git a/keystone/endpoint_policy/backends/base.py b/keystone/endpoint_policy/backends/base.py index 5ff414c1aa..7b5db48c55 100644 --- a/keystone/endpoint_policy/backends/base.py +++ b/keystone/endpoint_policy/backends/base.py @@ -19,8 +19,9 @@ class EndpointPolicyDriverBase(object, metaclass=abc.ABCMeta): """Interface description for an Endpoint Policy driver.""" @abc.abstractmethod - def create_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def create_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): """Create a policy association. 
:param policy_id: identity of policy that is being associated @@ -43,8 +44,9 @@ class EndpointPolicyDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def check_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def check_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): """Check existence of a policy association. :param policy_id: identity of policy that is being associated @@ -63,8 +65,9 @@ class EndpointPolicyDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def delete_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def delete_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): """Delete a policy association. :param policy_id: identity of policy that is being associated @@ -81,8 +84,9 @@ class EndpointPolicyDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def get_policy_association(self, endpoint_id=None, - service_id=None, region_id=None): + def get_policy_association( + self, endpoint_id=None, service_id=None, region_id=None + ): """Get the policy for an explicit association. 
This method is not exposed as a public API, but is used by diff --git a/keystone/endpoint_policy/backends/sql.py b/keystone/endpoint_policy/backends/sql.py index 7627375f19..a18d624a17 100644 --- a/keystone/endpoint_policy/backends/sql.py +++ b/keystone/endpoint_policy/backends/sql.py @@ -32,8 +32,9 @@ class PolicyAssociation(sql.ModelBase, sql.ModelDictMixin): endpoint_id = sql.Column(sql.String(64), nullable=True) service_id = sql.Column(sql.String(64), nullable=True) region_id = sql.Column(sql.String(64), nullable=True) - __table_args__ = (sql.UniqueConstraint('endpoint_id', 'service_id', - 'region_id'),) + __table_args__ = ( + sql.UniqueConstraint('endpoint_id', 'service_id', 'region_id'), + ) def to_dict(self): """Return the model's attributes as a dictionary. @@ -50,8 +51,9 @@ class PolicyAssociation(sql.ModelBase, sql.ModelDictMixin): class EndpointPolicy(base.EndpointPolicyDriverBase): - def create_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def create_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): with sql.session_for_write() as session: try: # See if there is already a row for this association, and if @@ -63,30 +65,40 @@ class EndpointPolicy(base.EndpointPolicyDriverBase): association = query.one() association.policy_id = policy_id except sql.NotFound: - association = PolicyAssociation(id=uuid.uuid4().hex, - policy_id=policy_id, - endpoint_id=endpoint_id, - service_id=service_id, - region_id=region_id) + association = PolicyAssociation( + id=uuid.uuid4().hex, + policy_id=policy_id, + endpoint_id=endpoint_id, + service_id=service_id, + region_id=region_id, + ) session.add(association) - def check_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def check_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): sql_constraints = sqlalchemy.and_( PolicyAssociation.policy_id == 
policy_id, PolicyAssociation.endpoint_id == endpoint_id, PolicyAssociation.service_id == service_id, - PolicyAssociation.region_id == region_id) + PolicyAssociation.region_id == region_id, + ) # NOTE(henry-nash): Getting a single value to save object # management overhead. with sql.session_for_read() as session: - if session.query(PolicyAssociation.id).filter( - sql_constraints).distinct().count() == 0: + if ( + session.query(PolicyAssociation.id) + .filter(sql_constraints) + .distinct() + .count() + == 0 + ): raise exception.PolicyAssociationNotFound() - def delete_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def delete_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): with sql.session_for_write() as session: query = session.query(PolicyAssociation) query = query.filter_by(policy_id=policy_id) @@ -95,17 +107,23 @@ class EndpointPolicy(base.EndpointPolicyDriverBase): query = query.filter_by(region_id=region_id) query.delete() - def get_policy_association(self, endpoint_id=None, - service_id=None, region_id=None): + def get_policy_association( + self, endpoint_id=None, service_id=None, region_id=None + ): sql_constraints = sqlalchemy.and_( PolicyAssociation.endpoint_id == endpoint_id, PolicyAssociation.service_id == service_id, - PolicyAssociation.region_id == region_id) + PolicyAssociation.region_id == region_id, + ) try: with sql.session_for_read() as session: - policy_id = session.query(PolicyAssociation.policy_id).filter( - sql_constraints).distinct().one() + policy_id = ( + session.query(PolicyAssociation.policy_id) + .filter(sql_constraints) + .distinct() + .one() + ) return {'policy_id': policy_id} except sql.NotFound: raise exception.PolicyAssociationNotFound() diff --git a/keystone/endpoint_policy/core.py b/keystone/endpoint_policy/core.py index 7d640cce39..c28df895cf 100644 --- a/keystone/endpoint_policy/core.py +++ b/keystone/endpoint_policy/core.py @@ -50,37 +50,52 
@@ class Manager(manager.Manager): - Service (in which case endpoint and region must be None) """ - if (endpoint_id is not None and - service_id is None and region_id is None): + if ( + endpoint_id is not None + and service_id is None + and region_id is None + ): return - if (service_id is not None and region_id is not None and - endpoint_id is None): + if ( + service_id is not None + and region_id is not None + and endpoint_id is None + ): return - if (service_id is not None and - endpoint_id is None and region_id is None): + if ( + service_id is not None + and endpoint_id is None + and region_id is None + ): return - raise exception.InvalidPolicyAssociation(endpoint_id=endpoint_id, - service_id=service_id, - region_id=region_id) + raise exception.InvalidPolicyAssociation( + endpoint_id=endpoint_id, service_id=service_id, region_id=region_id + ) - def create_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def create_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.create_policy_association(policy_id, endpoint_id, - service_id, region_id) + self.driver.create_policy_association( + policy_id, endpoint_id, service_id, region_id + ) - def check_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def check_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.check_policy_association(policy_id, endpoint_id, - service_id, region_id) + self.driver.check_policy_association( + policy_id, endpoint_id, service_id, region_id + ) - def delete_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): + def delete_policy_association( + self, policy_id, endpoint_id=None, service_id=None, region_id=None + ): 
self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.delete_policy_association(policy_id, endpoint_id, - service_id, region_id) + self.driver.delete_policy_association( + policy_id, endpoint_id, service_id, region_id + ) def list_endpoints_for_policy(self, policy_id): @@ -88,10 +103,13 @@ class Manager(manager.Manager): try: return PROVIDERS.catalog_api.get_endpoint(endpoint_id) except exception.EndpointNotFound: - msg = ('Endpoint %(endpoint_id)s referenced in ' - 'association for policy %(policy_id)s not found.') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': endpoint_id}) + msg = ( + 'Endpoint %(endpoint_id)s referenced in ' + 'association for policy %(policy_id)s not found.' + ) + LOG.warning( + msg, {'policy_id': policy_id, 'endpoint_id': endpoint_id} + ) raise def _get_endpoints_for_service(service_id, endpoints): @@ -100,14 +118,20 @@ class Manager(manager.Manager): return [ep for ep in endpoints if ep['service_id'] == service_id] def _get_endpoints_for_service_and_region( - service_id, region_id, endpoints, regions): + service_id, region_id, endpoints, regions + ): # TODO(henry-nash): Consider optimizing this in the future. # The lack of a two-way pointer in the region tree structure # makes this somewhat inefficient. def _recursively_get_endpoints_for_region( - region_id, service_id, endpoint_list, region_list, - endpoints_found, regions_examined): + region_id, + service_id, + endpoint_list, + region_list, + endpoints_found, + regions_examined, + ): """Recursively search down a region tree for endpoints. :param region_id: the point in the tree to examine @@ -125,30 +149,44 @@ class Manager(manager.Manager): """ if region_id in regions_examined: - msg = ('Circular reference or a repeated entry found ' - 'in region tree - %(region_id)s.') + msg = ( + 'Circular reference or a repeated entry found ' + 'in region tree - %(region_id)s.' 
+ ) LOG.error(msg, {'region_id': ref.region_id}) return regions_examined.append(region_id) - endpoints_found += ( - [ep for ep in endpoint_list if - ep['service_id'] == service_id and - ep['region_id'] == region_id]) + endpoints_found += [ + ep + for ep in endpoint_list + if ep['service_id'] == service_id + and ep['region_id'] == region_id + ] for region in region_list: if region['parent_region_id'] == region_id: _recursively_get_endpoints_for_region( - region['id'], service_id, endpoints, regions, - endpoints_found, regions_examined) + region['id'], + service_id, + endpoints, + regions, + endpoints_found, + regions_examined, + ) endpoints_found = [] regions_examined = [] # Now walk down the region tree _recursively_get_endpoints_for_region( - region_id, service_id, endpoints, regions, - endpoints_found, regions_examined) + region_id, + service_id, + endpoints, + regions, + endpoints_found, + regions_examined, + ) return endpoints_found @@ -158,30 +196,42 @@ class Manager(manager.Manager): for ref in self.list_associations_for_policy(policy_id): if ref.get('endpoint_id') is not None: matching_endpoints.append( - _get_endpoint(ref['endpoint_id'], policy_id)) + _get_endpoint(ref['endpoint_id'], policy_id) + ) continue - if (ref.get('service_id') is not None and - ref.get('region_id') is None): + if ( + ref.get('service_id') is not None + and ref.get('region_id') is None + ): matching_endpoints += _get_endpoints_for_service( - ref['service_id'], endpoints) + ref['service_id'], endpoints + ) continue - if (ref.get('service_id') is not None and - ref.get('region_id') is not None): - matching_endpoints += ( - _get_endpoints_for_service_and_region( - ref['service_id'], ref['region_id'], - endpoints, regions)) + if ( + ref.get('service_id') is not None + and ref.get('region_id') is not None + ): + matching_endpoints += _get_endpoints_for_service_and_region( + ref['service_id'], ref['region_id'], endpoints, regions + ) continue - msg = ('Unsupported policy association found 
- ' - 'Policy %(policy_id)s, Endpoint %(endpoint_id)s, ' - 'Service %(service_id)s, Region %(region_id)s, ') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': ref['endpoint_id'], - 'service_id': ref['service_id'], - 'region_id': ref['region_id']}) + msg = ( + 'Unsupported policy association found - ' + 'Policy %(policy_id)s, Endpoint %(endpoint_id)s, ' + 'Service %(service_id)s, Region %(region_id)s, ' + ) + LOG.warning( + msg, + { + 'policy_id': policy_id, + 'endpoint_id': ref['endpoint_id'], + 'service_id': ref['service_id'], + 'region_id': ref['region_id'], + }, + ) return matching_endpoints @@ -191,10 +241,13 @@ class Manager(manager.Manager): try: return PROVIDERS.policy_api.get_policy(policy_id) except exception.PolicyNotFound: - msg = ('Policy %(policy_id)s referenced in association ' - 'for endpoint %(endpoint_id)s not found.') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': endpoint_id}) + msg = ( + 'Policy %(policy_id)s referenced in association ' + 'for endpoint %(endpoint_id)s not found.' + ) + LOG.warning( + msg, {'policy_id': policy_id, 'endpoint_id': endpoint_id} + ) raise def _look_for_policy_for_region_and_service(endpoint): @@ -210,8 +263,8 @@ class Manager(manager.Manager): while region_id is not None: try: ref = self.get_policy_association( - service_id=endpoint['service_id'], - region_id=region_id) + service_id=endpoint['service_id'], region_id=region_id + ) return ref['policy_id'] except exception.PolicyAssociationNotFound: # nosec # There wasn't one for that region & service, handle below. @@ -225,8 +278,10 @@ class Manager(manager.Manager): if region.get('parent_region_id') is not None: region_id = region['parent_region_id'] if region_id in regions_examined: - msg = ('Circular reference or a repeated entry ' - 'found in region tree - %(region_id)s.') + msg = ( + 'Circular reference or a repeated entry ' + 'found in region tree - %(region_id)s.' 
+ ) LOG.error(msg, {'region_id': region_id}) break @@ -252,12 +307,14 @@ class Manager(manager.Manager): # Finally, just check if there is one for the service. try: ref = self.get_policy_association( - service_id=endpoint['service_id']) + service_id=endpoint['service_id'] + ) return _get_policy(ref['policy_id'], endpoint_id) except exception.PolicyAssociationNotFound: # nosec # No policy is associated with endpoint, handled below. pass - msg = _('No policy is associated with endpoint ' - '%(endpoint_id)s.') % {'endpoint_id': endpoint_id} + msg = _( + 'No policy is associated with endpoint ' '%(endpoint_id)s.' + ) % {'endpoint_id': endpoint_id} raise exception.NotFound(msg) diff --git a/keystone/exception.py b/keystone/exception.py index 82415fa953..8070fd463f 100644 --- a/keystone/exception.py +++ b/keystone/exception.py @@ -34,8 +34,7 @@ def _format_with_unicode_kwargs(msg_format, kwargs): return msg_format % kwargs except UnicodeDecodeError: try: - kwargs = {k: encodeutils.safe_decode(v) - for k, v in kwargs.items()} + kwargs = {k: encodeutils.safe_decode(v) for k, v in kwargs.items()} except UnicodeDecodeError: # NOTE(jamielennox): This is the complete failure case # at least by showing the template we have some idea @@ -96,17 +95,20 @@ class Error(Exception, metaclass=_KeystoneExceptionMeta): class ValidationError(Error): - message_format = _("Expecting to find %(attribute)s in %(target)s." - " The server could not comply with the request" - " since it is either malformed or otherwise" - " incorrect. The client is assumed to be in error.") + message_format = _( + "Expecting to find %(attribute)s in %(target)s." + " The server could not comply with the request" + " since it is either malformed or otherwise" + " incorrect. The client is assumed to be in error." 
+ ) code = int(http.client.BAD_REQUEST) title = http.client.responses[http.client.BAD_REQUEST] class URLValidationError(ValidationError): - message_format = _("Cannot create an endpoint with an invalid URL:" - " %(url)s.") + message_format = _( + "Cannot create an endpoint with an invalid URL:" " %(url)s." + ) class PasswordValidationError(ValidationError): @@ -114,31 +116,38 @@ class PasswordValidationError(ValidationError): class PasswordRequirementsValidationError(PasswordValidationError): - message_format = _("The password does not match the requirements:" - " %(detail)s.") + message_format = _( + "The password does not match the requirements:" " %(detail)s." + ) class PasswordHistoryValidationError(PasswordValidationError): - message_format = _("The new password cannot be identical to a " - "previous password. The total number which " - "includes the new password must be unique is " - "%(unique_count)s.") + message_format = _( + "The new password cannot be identical to a " + "previous password. The total number which " + "includes the new password must be unique is " + "%(unique_count)s." + ) class PasswordAgeValidationError(PasswordValidationError): - message_format = _("You cannot change your password at this time due " - "to the minimum password age. Once you change your " - "password, it must be used for %(min_age_days)d day(s) " - "before it can be changed. Please try again in " - "%(days_left)d day(s) or contact your administrator to " - "reset your password.") + message_format = _( + "You cannot change your password at this time due " + "to the minimum password age. Once you change your " + "password, it must be used for %(min_age_days)d day(s) " + "before it can be changed. Please try again in " + "%(days_left)d day(s) or contact your administrator to " + "reset your password." 
+ ) class PasswordSelfServiceDisabled(PasswordValidationError): - message_format = _("You cannot change your password at this time due " - "to password policy disallowing password changes. " - "Please contact your administrator to reset your " - "password.") + message_format = _( + "You cannot change your password at this time due " + "to password policy disallowing password changes. " + "Please contact your administrator to reset your " + "password." + ) class SchemaValidationError(ValidationError): @@ -148,39 +157,49 @@ class SchemaValidationError(ValidationError): class ValidationTimeStampError(Error): - message_format = _("Timestamp not in expected format." - " The server could not comply with the request" - " since it is either malformed or otherwise" - " incorrect. The client is assumed to be in error.") + message_format = _( + "Timestamp not in expected format." + " The server could not comply with the request" + " since it is either malformed or otherwise" + " incorrect. The client is assumed to be in error." + ) code = int(http.client.BAD_REQUEST) title = http.client.responses[http.client.BAD_REQUEST] class InvalidOperatorError(ValidationError): - message_format = _("The given operator %(_op)s is not valid." - " It must be one of the following:" - " 'eq', 'neq', 'lt', 'lte', 'gt', or 'gte'.") + message_format = _( + "The given operator %(_op)s is not valid." + " It must be one of the following:" + " 'eq', 'neq', 'lt', 'lte', 'gt', or 'gte'." + ) class ValidationExpirationError(Error): - message_format = _("The 'expires_at' must not be before now." - " The server could not comply with the request" - " since it is either malformed or otherwise" - " incorrect. The client is assumed to be in error.") + message_format = _( + "The 'expires_at' must not be before now." + " The server could not comply with the request" + " since it is either malformed or otherwise" + " incorrect. The client is assumed to be in error." 
+ ) code = int(http.client.BAD_REQUEST) title = http.client.responses[http.client.BAD_REQUEST] class StringLengthExceeded(ValidationError): - message_format = _("String length exceeded. The length of" - " string '%(string)s' exceeds the limit" - " of column %(type)s(CHAR(%(length)d)).") + message_format = _( + "String length exceeded. The length of" + " string '%(string)s' exceeds the limit" + " of column %(type)s(CHAR(%(length)d))." + ) class AmbiguityError(ValidationError): - message_format = _("There are multiple %(resource)s entities named" - " '%(name)s'. Please use ID instead of names to" - " resolve the ambiguity.") + message_format = _( + "There are multiple %(resource)s entities named" + " '%(name)s'. Please use ID instead of names to" + " resolve the ambiguity." + ) class ApplicationCredentialValidationError(ValidationError): @@ -188,8 +207,10 @@ class ApplicationCredentialValidationError(ValidationError): class CircularRegionHierarchyError(Error): - message_format = _("The specified parent region %(parent_region_id)s " - "would create a circular region hierarchy.") + message_format = _( + "The specified parent region %(parent_region_id)s " + "would create a circular region hierarchy." + ) code = int(http.client.BAD_REQUEST) title = http.client.responses[http.client.BAD_REQUEST] @@ -207,24 +228,32 @@ class ForbiddenNotSecurity(Error): class PasswordVerificationError(ForbiddenNotSecurity): - message_format = _("The password length must be less than or equal " - "to %(size)i. The server could not comply with the " - "request because the password is invalid.") + message_format = _( + "The password length must be less than or equal " + "to %(size)i. The server could not comply with the " + "request because the password is invalid." 
+ ) class RegionDeletionError(ForbiddenNotSecurity): - message_format = _("Unable to delete region %(region_id)s because it or " - "its child regions have associated endpoints.") + message_format = _( + "Unable to delete region %(region_id)s because it or " + "its child regions have associated endpoints." + ) class ApplicationCredentialLimitExceeded(ForbiddenNotSecurity): - message_format = _("Unable to create additional application credentials, " - "maximum of %(limit)d already exceeded for user.") + message_format = _( + "Unable to create additional application credentials, " + "maximum of %(limit)d already exceeded for user." + ) class CredentialLimitExceeded(ForbiddenNotSecurity): - message_format = _("Unable to create additional credentials, maximum " - "of %(limit)d already exceeded for user.") + message_format = _( + "Unable to create additional credentials, maximum " + "of %(limit)d already exceeded for user." + ) class SecurityError(Error): @@ -275,7 +304,8 @@ class SecurityError(Error): message = _format_with_unicode_kwargs(message, kwargs) return _('%(message)s %(amendment)s') % { 'message': message, - 'amendment': self.amendment} + 'amendment': self.amendment, + } return _format_with_unicode_kwargs(self.message_format, kwargs) @@ -289,15 +319,18 @@ class Unauthorized(SecurityError): class InsufficientAuthMethods(Error): # NOTE(adriant): This is an internal only error that is built into # an auth receipt response. - message_format = _("Insufficient auth methods received for %(user_id)s. " - "Auth Methods Provided: %(methods)s.") + message_format = _( + "Insufficient auth methods received for %(user_id)s. " + "Auth Methods Provided: %(methods)s." 
+ ) code = 401 title = 'Unauthorized' def __init__(self, message=None, user_id=None, methods=None): methods_str = '[%s]' % ','.join(methods) super(InsufficientAuthMethods, self).__init__( - message, user_id=user_id, methods=methods_str) + message, user_id=user_id, methods=methods_str + ) self.user_id = user_id self.methods = methods @@ -308,8 +341,10 @@ class ReceiptNotFound(Unauthorized): class PasswordExpired(Unauthorized): - message_format = _("The password is expired and needs to be changed for " - "user: %(user_id)s.") + message_format = _( + "The password is expired and needs to be changed for " + "user: %(user_id)s." + ) class AuthPluginException(Unauthorized): @@ -338,7 +373,8 @@ class AuthMethodNotSupported(AuthPluginException): class ApplicationCredentialAuthError(AuthPluginException): message_format = _( - "Error authenticating with application credential: %(detail)s") + "Error authenticating with application credential: %(detail)s" + ) class AdditionalAuthRequired(AuthPluginException): @@ -350,28 +386,35 @@ class AdditionalAuthRequired(AuthPluginException): class Forbidden(SecurityError): - message_format = _("You are not authorized to perform the" - " requested action.") + message_format = _( + "You are not authorized to perform the" " requested action." + ) code = int(http.client.FORBIDDEN) title = http.client.responses[http.client.FORBIDDEN] class ForbiddenAction(Forbidden): - message_format = _("You are not authorized to perform the" - " requested action: %(action)s.") + message_format = _( + "You are not authorized to perform the" + " requested action: %(action)s." + ) class CrossBackendNotAllowed(Forbidden): - message_format = _("Group membership across backend boundaries is not " - "allowed. Group in question is %(group_id)s, " - "user is %(user_id)s.") + message_format = _( + "Group membership across backend boundaries is not " + "allowed. Group in question is %(group_id)s, " + "user is %(user_id)s." 
+ ) class InvalidPolicyAssociation(Forbidden): - message_format = _("Invalid mix of entities for policy association: " - "only Endpoint, Service, or Region+Service allowed. " - "Request was - Endpoint: %(endpoint_id)s, " - "Service: %(service_id)s, Region: %(region_id)s.") + message_format = _( + "Invalid mix of entities for policy association: " + "only Endpoint, Service, or Region+Service allowed. " + "Request was - Endpoint: %(endpoint_id)s, " + "Service: %(service_id)s, Region: %(region_id)s." + ) class InvalidDomainConfig(Forbidden): @@ -384,12 +427,15 @@ class InvalidLimit(Forbidden): class LimitTreeExceedError(Exception): def __init__(self, project_id, max_limit_depth): - super(LimitTreeExceedError, self).__init__(_( - "Keystone cannot start due to project hierarchical depth in the " - "current deployment (project_ids: %(project_id)s) exceeds the " - "enforcement model's maximum limit of %(max_limit_depth)s. Please " - "use a different enforcement model to correct the issue." - ) % {'project_id': project_id, 'max_limit_depth': max_limit_depth}) + super(LimitTreeExceedError, self).__init__( + _( + "Keystone cannot start due to project hierarchical depth in the " + "current deployment (project_ids: %(project_id)s) exceeds the " + "enforcement model's maximum limit of %(max_limit_depth)s. Please " + "use a different enforcement model to correct the issue." + ) + % {'project_id': project_id, 'max_limit_depth': max_limit_depth} + ) class NotFound(Error): @@ -423,13 +469,17 @@ class InvalidImpliedRole(Forbidden): class DomainSpecificRoleMismatch(Forbidden): - message_format = _("Project %(project_id)s must be in the same domain " - "as the role %(role_id)s being assigned.") + message_format = _( + "Project %(project_id)s must be in the same domain " + "as the role %(role_id)s being assigned." 
+ ) class DomainSpecificRoleNotWithinIdPDomain(Forbidden): - message_format = _("role: %(role_name)s must be within the same domain as " - "the identity provider: %(identity_provider)s.") + message_format = _( + "role: %(role_name)s must be within the same domain as " + "the identity provider: %(identity_provider)s." + ) class DomainIdInvalid(ValidationError): @@ -437,9 +487,11 @@ class DomainIdInvalid(ValidationError): class RoleAssignmentNotFound(NotFound): - message_format = _("Could not find role assignment with role: " - "%(role_id)s, user or group: %(actor_id)s, " - "project, domain, or system: %(target_id)s.") + message_format = _( + "Could not find role assignment with role: " + "%(role_id)s, user or group: %(actor_id)s, " + "project, domain, or system: %(target_id)s." + ) class RegionNotFound(NotFound): @@ -507,8 +559,10 @@ class ServiceProviderNotFound(NotFound): class FederatedProtocolNotFound(NotFound): - message_format = _("Could not find federated protocol %(protocol_id)s for" - " Identity Provider: %(idp_id)s.") + message_format = _( + "Could not find federated protocol %(protocol_id)s for" + " Identity Provider: %(idp_id)s." + ) class PublicIDNotFound(NotFound): @@ -526,18 +580,24 @@ class LimitNotFound(NotFound): class NoLimitReference(Forbidden): - message_format = _("Unable to create a limit that has no corresponding " - "registered limit.") + message_format = _( + "Unable to create a limit that has no corresponding " + "registered limit." + ) class RegisteredLimitError(ForbiddenNotSecurity): - message_format = _("Unable to update or delete registered limit %(id)s " - "because there are project limits associated with it.") + message_format = _( + "Unable to update or delete registered limit %(id)s " + "because there are project limits associated with it." 
+ ) class DomainConfigNotFound(NotFound): - message_format = _('Could not find %(group_or_option)s in domain ' - 'configuration for domain %(domain_id)s.') + message_format = _( + 'Could not find %(group_or_option)s in domain ' + 'configuration for domain %(domain_id)s.' + ) class ConfigRegistrationNotFound(Exception): @@ -548,8 +608,10 @@ class ConfigRegistrationNotFound(Exception): class ApplicationCredentialNotFound(NotFound): - message_format = _("Could not find Application Credential: " - "%(application_credential_id)s.") + message_format = _( + "Could not find Application Credential: " + "%(application_credential_id)s." + ) class AccessRuleNotFound(NotFound): @@ -557,8 +619,9 @@ class AccessRuleNotFound(NotFound): class Conflict(Error): - message_format = _("Conflict occurred attempting to store %(type)s -" - " %(details)s.") + message_format = _( + "Conflict occurred attempting to store %(type)s -" " %(details)s." + ) code = int(http.client.CONFLICT) title = http.client.responses[http.client.CONFLICT] @@ -566,11 +629,15 @@ class Conflict(Error): class UnexpectedError(SecurityError): """Avoids exposing details of failures, unless in insecure_debug mode.""" - message_format = _("An unexpected error prevented the server " - "from fulfilling your request.") + message_format = _( + "An unexpected error prevented the server " + "from fulfilling your request." + ) - debug_message_format = _("An unexpected error prevented the server " - "from fulfilling your request: %(exception)s.") + debug_message_format = _( + "An unexpected error prevented the server " + "from fulfilling your request: %(exception)s." 
+ ) def _build_message(self, message, **kwargs): @@ -580,25 +647,30 @@ class UnexpectedError(SecurityError): kwargs.setdefault('exception', '') return super(UnexpectedError, self)._build_message( - message or self.debug_message_format, **kwargs) + message or self.debug_message_format, **kwargs + ) code = int(http.client.INTERNAL_SERVER_ERROR) title = http.client.responses[http.client.INTERNAL_SERVER_ERROR] class TrustConsumeMaximumAttempt(UnexpectedError): - debug_message_format = _("Unable to consume trust %(trust_id)s. Unable to " - "acquire lock.") + debug_message_format = _( + "Unable to consume trust %(trust_id)s. Unable to " "acquire lock." + ) class MalformedEndpoint(UnexpectedError): - debug_message_format = _("Malformed endpoint URL (%(endpoint)s)," - " see ERROR log for details.") + debug_message_format = _( + "Malformed endpoint URL (%(endpoint)s)," " see ERROR log for details." + ) class MappedGroupNotFound(UnexpectedError): - debug_message_format = _("Group %(group_id)s returned by mapping " - "%(mapping_id)s was not found in the backend.") + debug_message_format = _( + "Group %(group_id)s returned by mapping " + "%(mapping_id)s was not found in the backend." + ) class MetadataFileError(UnexpectedError): @@ -606,122 +678,154 @@ class MetadataFileError(UnexpectedError): class DirectMappingError(UnexpectedError): - debug_message_format = _("Local section in mapping %(mapping_id)s refers " - "to a remote match that doesn't exist " - "(e.g. {0} in a local section).") + debug_message_format = _( + "Local section in mapping %(mapping_id)s refers " + "to a remote match that doesn't exist " + "(e.g. {0} in a local section)." + ) class AssignmentTypeCalculationError(UnexpectedError): debug_message_format = _( 'Unexpected combination of grant attributes - ' 'User: %(user_id)s, Group: %(group_id)s, Project: %(project_id)s, ' - 'Domain: %(domain_id)s.') + 'Domain: %(domain_id)s.' 
+ ) class NotImplemented(Error): - message_format = _("The action you have requested has not" - " been implemented.") + message_format = _( + "The action you have requested has not" " been implemented." + ) code = int(http.client.NOT_IMPLEMENTED) title = http.client.responses[http.client.NOT_IMPLEMENTED] class Gone(Error): - message_format = _("The service you have requested is no" - " longer available on this server.") + message_format = _( + "The service you have requested is no" + " longer available on this server." + ) code = int(http.client.GONE) title = http.client.responses[http.client.GONE] class ConfigFileNotFound(UnexpectedError): - debug_message_format = _("The Keystone configuration file %(config_file)s " - "could not be found.") + debug_message_format = _( + "The Keystone configuration file %(config_file)s " + "could not be found." + ) class KeysNotFound(UnexpectedError): - debug_message_format = _('No encryption keys found; run keystone-manage ' - 'fernet_setup to bootstrap one.') + debug_message_format = _( + 'No encryption keys found; run keystone-manage ' + 'fernet_setup to bootstrap one.' + ) class MultipleSQLDriversInConfig(UnexpectedError): - debug_message_format = _('The Keystone domain-specific configuration has ' - 'specified more than one SQL driver (only one is ' - 'permitted): %(source)s.') + debug_message_format = _( + 'The Keystone domain-specific configuration has ' + 'specified more than one SQL driver (only one is ' + 'permitted): %(source)s.' + ) class MigrationNotProvided(Exception): def __init__(self, mod_name, path): - super(MigrationNotProvided, self).__init__(_( - "%(mod_name)s doesn't provide database migrations. The migration" - " repository path at %(path)s doesn't exist or isn't a directory." - ) % {'mod_name': mod_name, 'path': path}) + super(MigrationNotProvided, self).__init__( + _( + "%(mod_name)s doesn't provide database migrations. The migration" + " repository path at %(path)s doesn't exist or isn't a directory." 
+ ) + % {'mod_name': mod_name, 'path': path} + ) class UnsupportedTokenVersionException(UnexpectedError): - debug_message_format = _('Token version is unrecognizable or ' - 'unsupported.') + debug_message_format = _( + 'Token version is unrecognizable or ' 'unsupported.' + ) class SAMLSigningError(UnexpectedError): - debug_message_format = _('Unable to sign SAML assertion. It is likely ' - 'that this server does not have xmlsec1 ' - 'installed or this is the result of ' - 'misconfiguration. Reason %(reason)s.') + debug_message_format = _( + 'Unable to sign SAML assertion. It is likely ' + 'that this server does not have xmlsec1 ' + 'installed or this is the result of ' + 'misconfiguration. Reason %(reason)s.' + ) class OAuthHeadersMissingError(UnexpectedError): - debug_message_format = _('No Authorization headers found, cannot proceed ' - 'with OAuth related calls. If running under ' - 'HTTPd or Apache, ensure WSGIPassAuthorization ' - 'is set to On.') + debug_message_format = _( + 'No Authorization headers found, cannot proceed ' + 'with OAuth related calls. If running under ' + 'HTTPd or Apache, ensure WSGIPassAuthorization ' + 'is set to On.' + ) class TokenlessAuthConfigError(ValidationError): - message_format = _('Could not determine Identity Provider ID. The ' - 'configuration option %(issuer_attribute)s ' - 'was not found in the request environment.') + message_format = _( + 'Could not determine Identity Provider ID. The ' + 'configuration option %(issuer_attribute)s ' + 'was not found in the request environment.' + ) class CredentialEncryptionError(Exception): - message_format = _("An unexpected error prevented the server " - "from accessing encrypted credentials.") + message_format = _( + "An unexpected error prevented the server " + "from accessing encrypted credentials." 
+ ) class LDAPServerConnectionError(UnexpectedError): - debug_message_format = _('Unable to establish a connection to ' - 'LDAP Server (%(url)s).') + debug_message_format = _( + 'Unable to establish a connection to ' 'LDAP Server (%(url)s).' + ) class LDAPInvalidCredentialsError(UnexpectedError): - message_format = _('Unable to authenticate against Identity backend - ' - 'Invalid username or password') + message_format = _( + 'Unable to authenticate against Identity backend - ' + 'Invalid username or password' + ) class LDAPSizeLimitExceeded(UnexpectedError): - message_format = _('Number of User/Group entities returned by LDAP ' - 'exceeded size limit. Contact your LDAP ' - 'administrator.') + message_format = _( + 'Number of User/Group entities returned by LDAP ' + 'exceeded size limit. Contact your LDAP ' + 'administrator.' + ) class CacheDeserializationError(Exception): def __init__(self, obj, data): super(CacheDeserializationError, self).__init__( - _('Failed to deserialize %(obj)s. Data is %(data)s') % { - 'obj': obj, 'data': data - } + _('Failed to deserialize %(obj)s. Data is %(data)s') + % {'obj': obj, 'data': data} ) class ResourceUpdateForbidden(ForbiddenNotSecurity): - message_format = _('Unable to update immutable %(type)s resource: ' - '`%(resource_id)s. Set resource option "immutable" ' - 'to false first.') + message_format = _( + 'Unable to update immutable %(type)s resource: ' + '`%(resource_id)s. Set resource option "immutable" ' + 'to false first.' + ) class ResourceDeleteForbidden(ForbiddenNotSecurity): - message_format = _('Unable to delete immutable %(type)s resource: ' - '`%(resource_id)s. Set resource option "immutable" ' - 'to false first.') + message_format = _( + 'Unable to delete immutable %(type)s resource: ' + '`%(resource_id)s. Set resource option "immutable" ' + 'to false first.' 
+ ) class OAuth2Error(Error): diff --git a/keystone/federation/backends/sql.py b/keystone/federation/backends/sql.py index a8c8258ec8..d293b9429b 100644 --- a/keystone/federation/backends/sql.py +++ b/keystone/federation/backends/sql.py @@ -32,8 +32,11 @@ class FederationProtocolModel(sql.ModelBase, sql.ModelDictMixin): mutable_attributes = frozenset(['mapping_id', 'remote_id_attribute']) id = sql.Column(sql.String(64), primary_key=True) - idp_id = sql.Column(sql.String(64), sql.ForeignKey('identity_provider.id', - ondelete='CASCADE'), primary_key=True) + idp_id = sql.Column( + sql.String(64), + sql.ForeignKey('identity_provider.id', ondelete='CASCADE'), + primary_key=True, + ) mapping_id = sql.Column(sql.String(64), nullable=False) remote_id_attribute = sql.Column(sql.String(64)) @@ -52,10 +55,17 @@ class FederationProtocolModel(sql.ModelBase, sql.ModelDictMixin): class IdentityProviderModel(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'identity_provider' - attributes = ['id', 'domain_id', 'enabled', 'description', 'remote_ids', - 'authorization_ttl'] - mutable_attributes = frozenset(['description', 'enabled', 'remote_ids', - 'authorization_ttl']) + attributes = [ + 'id', + 'domain_id', + 'enabled', + 'description', + 'remote_ids', + 'authorization_ttl', + ] + mutable_attributes = frozenset( + ['description', 'enabled', 'remote_ids', 'authorization_ttl'] + ) id = sql.Column(sql.String(64), primary_key=True) domain_id = sql.Column(sql.String(64), nullable=False) @@ -63,13 +73,15 @@ class IdentityProviderModel(sql.ModelBase, sql.ModelDictMixin): description = sql.Column(sql.Text(), nullable=True) authorization_ttl = sql.Column(sql.Integer, nullable=True) - remote_ids = orm.relationship('IdPRemoteIdsModel', - order_by='IdPRemoteIdsModel.remote_id', - cascade='all, delete-orphan') + remote_ids = orm.relationship( + 'IdPRemoteIdsModel', + order_by='IdPRemoteIdsModel.remote_id', + cascade='all, delete-orphan', + ) expiring_user_group_memberships = 
orm.relationship( 'ExpiringUserGroupMembership', cascade='all, delete-orphan', - backref="idp" + backref="idp", ) @classmethod @@ -104,11 +116,11 @@ class IdPRemoteIdsModel(sql.ModelBase, sql.ModelDictMixin): attributes = ['idp_id', 'remote_id'] mutable_attributes = frozenset(['idp_id', 'remote_id']) - idp_id = sql.Column(sql.String(64), - sql.ForeignKey('identity_provider.id', - ondelete='CASCADE')) - remote_id = sql.Column(sql.String(255), - primary_key=True) + idp_id = sql.Column( + sql.String(64), + sql.ForeignKey('identity_provider.id', ondelete='CASCADE'), + ) + remote_id = sql.Column(sql.String(255), primary_key=True) @classmethod def from_dict(cls, dictionary): @@ -129,8 +141,9 @@ class MappingModel(sql.ModelBase, sql.ModelDictMixin): id = sql.Column(sql.String(64), primary_key=True) rules = sql.Column(sql.JsonBlob(), nullable=False) - schema_version = sql.Column(sql.String(5), nullable=False, - server_default='1.0') + schema_version = sql.Column( + sql.String(5), nullable=False, server_default='1.0' + ) @classmethod def from_dict(cls, dictionary): @@ -149,10 +162,17 @@ class MappingModel(sql.ModelBase, sql.ModelDictMixin): class ServiceProviderModel(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'service_provider' - attributes = ['auth_url', 'id', 'enabled', 'description', - 'relay_state_prefix', 'sp_url'] - mutable_attributes = frozenset(['auth_url', 'description', 'enabled', - 'relay_state_prefix', 'sp_url']) + attributes = [ + 'auth_url', + 'id', + 'enabled', + 'description', + 'relay_state_prefix', + 'sp_url', + ] + mutable_attributes = frozenset( + ['auth_url', 'description', 'enabled', 'relay_state_prefix', 'sp_url'] + ) id = sql.Column(sql.String(64), primary_key=True) enabled = sql.Column(sql.Boolean, nullable=False) @@ -181,8 +201,10 @@ class Federation(base.FederationDriverBase): def _handle_idp_conflict(self, e): conflict_type = 'identity_provider' details = str(e) - LOG.debug(self._CONFLICT_LOG_MSG, {'conflict_type': conflict_type, - 
'details': details}) + LOG.debug( + self._CONFLICT_LOG_MSG, + {'conflict_type': conflict_type, 'details': details}, + ) if 'remote_id' in details: msg = _('Duplicate remote ID: %s') else: @@ -258,8 +280,7 @@ class Federation(base.FederationDriverBase): try: return q.one() except sql.NotFound: - kwargs = {'protocol_id': protocol_id, - 'idp_id': idp_id} + kwargs = {'protocol_id': protocol_id, 'idp_id': idp_id} raise exception.FederatedProtocolNotFound(**kwargs) @sql.handle_conflicts(conflict_type='federation_protocol') diff --git a/keystone/federation/core.py b/keystone/federation/core.py index bbf81bf2e1..b7b15c94ee 100644 --- a/keystone/federation/core.py +++ b/keystone/federation/core.py @@ -49,12 +49,14 @@ class Manager(manager.Manager): def __init__(self): super(Manager, self).__init__(CONF.federation.driver) notifications.register_event_callback( - notifications.ACTIONS.internal, notifications.DOMAIN_DELETED, - self._cleanup_identity_provider + notifications.ACTIONS.internal, + notifications.DOMAIN_DELETED, + self._cleanup_identity_provider, ) - def _cleanup_identity_provider(self, service, resource_type, operation, - payload): + def _cleanup_identity_provider( + self, service, resource_type, operation, payload + ): domain_id = payload['resource_info'] hints = driver_hints.Hints() hints.add_filter('domain_id', domain_id) @@ -63,10 +65,14 @@ class Manager(manager.Manager): try: self.delete_idp(idp['id']) except exception.IdentityProviderNotFound: - LOG.debug(('Identity Provider %(idpid)s not found when ' - 'deleting domain contents for %(domainid)s, ' - 'continuing with cleanup.'), - {'idpid': idp['id'], 'domainid': domain_id}) + LOG.debug( + ( + 'Identity Provider %(idpid)s not found when ' + 'deleting domain contents for %(domainid)s, ' + 'continuing with cleanup.' 
+ ), + {'idpid': idp['id'], 'domainid': domain_id}, + ) def create_idp(self, idp_id, idp): auto_created_domain = False @@ -113,7 +119,7 @@ class Manager(manager.Manager): 'id': domain_id, 'name': domain_id, 'description': desc, - 'enabled': True + 'enabled': True, } PROVIDERS.resource_api.create_domain(domain['id'], domain) return domain_id @@ -137,12 +143,9 @@ class Manager(manager.Manager): :rtype: list of dicts """ + def normalize(sp): - ref = { - 'auth_url': sp.auth_url, - 'id': sp.id, - 'sp_url': sp.sp_url - } + ref = {'auth_url': sp.auth_url, 'id': sp.id, 'sp_url': sp.sp_url} return ref service_providers = self.driver.get_enabled_service_providers() @@ -166,7 +169,8 @@ class Manager(manager.Manager): mapping = self.get_mapping_from_idp_and_protocol(idp_id, protocol_id) rule_processor = utils.create_attribute_mapping_rules_processor( - mapping) + mapping + ) mapped_properties = rule_processor.process(assertion_data) return mapped_properties, mapping['id'] diff --git a/keystone/federation/idp.py b/keystone/federation/idp.py index 2f1a4fe5a4..ce2f0fa5ad 100644 --- a/keystone/federation/idp.py +++ b/keystone/federation/idp.py @@ -27,6 +27,7 @@ from saml2 import saml from saml2 import samlp from saml2.schema import soapenv from saml2 import sigver + xmldsig = importutils.try_import("saml2.xmldsig") if not xmldsig: xmldsig = importutils.try_import("xmldsig") @@ -47,9 +48,18 @@ class SAMLGenerator(object): def __init__(self): self.assertion_id = uuid.uuid4().hex - def samlize_token(self, issuer, recipient, user, user_domain_name, roles, - project, project_domain_name, groups, - expires_in=None): + def samlize_token( + self, + issuer, + recipient, + user, + user_domain_name, + roles, + project, + project_domain_name, + groups, + expires_in=None, + ): """Convert Keystone attributes to a SAML assertion. 
:param issuer: URL of the issuing party @@ -80,19 +90,24 @@ class SAMLGenerator(object): saml_issuer = self._create_issuer(issuer) subject = self._create_subject(user, expiration_time, recipient) attribute_statement = self._create_attribute_statement( - user, user_domain_name, roles, project, project_domain_name, - groups) + user, user_domain_name, roles, project, project_domain_name, groups + ) authn_statement = self._create_authn_statement(issuer, expiration_time) signature = self._create_signature() - assertion = self._create_assertion(saml_issuer, signature, - subject, authn_statement, - attribute_statement) + assertion = self._create_assertion( + saml_issuer, + signature, + subject, + authn_statement, + attribute_statement, + ) assertion = _sign_assertion(assertion) - response = self._create_response(saml_issuer, status, assertion, - recipient) + response = self._create_response( + saml_issuer, status, assertion, recipient + ) return response def _determine_expiration_time(self, expires_in): @@ -166,9 +181,15 @@ class SAMLGenerator(object): subject.name_id = name_id return subject - def _create_attribute_statement(self, user, user_domain_name, roles, - project, project_domain_name, - groups): + def _create_attribute_statement( + self, + user, + user_domain_name, + roles, + project, + project_domain_name, + groups, + ): """Create an object that represents a SAML AttributeStatement. 
@@ -208,6 +229,7 @@ class SAMLGenerator(object): :returns: XML object """ + def _build_attribute(attribute_name, attribute_values): attribute = saml.Attribute() attribute.name = attribute_name @@ -223,9 +245,11 @@ class SAMLGenerator(object): roles_attribute = _build_attribute('openstack_roles', roles) project_attribute = _build_attribute('openstack_project', [project]) project_domain_attribute = _build_attribute( - 'openstack_project_domain', [project_domain_name]) + 'openstack_project_domain', [project_domain_name] + ) user_domain_attribute = _build_attribute( - 'openstack_user_domain', [user_domain_name]) + 'openstack_user_domain', [user_domain_name] + ) attribute_statement = saml.AttributeStatement() attribute_statement.attribute.append(user_attribute) @@ -235,8 +259,7 @@ class SAMLGenerator(object): attribute_statement.attribute.append(user_domain_attribute) if groups: - groups_attribute = _build_attribute( - 'openstack_groups', groups) + groups_attribute = _build_attribute('openstack_groups', groups) attribute_statement.attribute.append(groups_attribute) return attribute_statement @@ -277,8 +300,9 @@ class SAMLGenerator(object): return authn_statement - def _create_assertion(self, issuer, signature, subject, authn_statement, - attribute_statement): + def _create_assertion( + self, issuer, signature, subject, authn_statement, attribute_statement + ): """Create an object that represents a SAML Assertion. = 7.1.0 @@ -424,23 +450,24 @@ def _verify_assertion_binary_is_installed(): # the binary exists, though. We just want to make sure it's actually # installed and if an `CalledProcessError` isn't thrown, it is. subprocess.check_output( # nosec : The contents of this command are - # coming from either the default - # configuration value for - # CONF.saml.xmlsec1_binary or an operator - # supplied location for that binary. In - # either case, it is safe to assume this - # input is coming from a trusted source and - # not a possible attacker (over the API). 
+ # coming from either the default + # configuration value for + # CONF.saml.xmlsec1_binary or an operator + # supplied location for that binary. In + # either case, it is safe to assume this + # input is coming from a trusted source and + # not a possible attacker (over the API). ['/usr/bin/which', CONF.saml.xmlsec1_binary] ) except subprocess.CalledProcessError: msg = ( 'Unable to locate %(binary)s binary on the system. Check to make ' - 'sure it is installed.') % {'binary': CONF.saml.xmlsec1_binary} + 'sure it is installed.' + ) % {'binary': CONF.saml.xmlsec1_binary} tr_msg = _( 'Unable to locate %(binary)s binary on the system. Check to ' - 'make sure it is installed.') % { - 'binary': CONF.saml.xmlsec1_binary} + 'make sure it is installed.' + ) % {'binary': CONF.saml.xmlsec1_binary} LOG.error(msg) raise exception.SAMLSigningError(reason=tr_msg) @@ -470,8 +497,9 @@ def _sign_assertion(assertion): if ',' in getattr(CONF.saml, option, ''): raise exception.UnexpectedError( 'The configuration value in `keystone.conf [saml] %s` cannot ' - 'contain a comma (`,`). Please fix your configuration.' % - option) + 'contain a comma (`,`). Please fix your configuration.' 
+ % option + ) # xmlsec1 --sign --privkey-pem privkey,cert --id-attr:ID certificates = '%(idp_private_key)s,%(idp_public_key)s' % { @@ -487,36 +515,49 @@ def _sign_assertion(assertion): _verify_assertion_binary_is_installed() command_list = [ - CONF.saml.xmlsec1_binary, '--sign', '--privkey-pem', certificates, - '--id-attr:ID', 'Assertion'] + CONF.saml.xmlsec1_binary, + '--sign', + '--privkey-pem', + certificates, + '--id-attr:ID', + 'Assertion', + ] file_path = None try: # NOTE(gyee): need to make the namespace prefixes explicit so # they won't get reassigned when we wrap the assertion into # SAML2 response - file_path = fileutils.write_to_tempfile(assertion.to_string( - nspair={'saml': saml2.NAMESPACE, - 'xmldsig': xmldsig.NAMESPACE})) + file_path = fileutils.write_to_tempfile( + assertion.to_string( + nspair={'saml': saml2.NAMESPACE, 'xmldsig': xmldsig.NAMESPACE} + ) + ) command_list.append(file_path) - stdout = subprocess.check_output(command_list, # nosec : The contents - # of the command list are coming from - # a trusted source because the - # executable and arguments all either - # come from the config file or are - # hardcoded. The command list is - # initialized earlier in this function - # to a list and it's still a list at - # this point in the function. There is - # no opportunity for an attacker to - # attempt command injection via string - # parsing. - stderr=subprocess.STDOUT) + stdout = subprocess.check_output( + command_list, # nosec : The contents + # of the command list are coming from + # a trusted source because the + # executable and arguments all either + # come from the config file or are + # hardcoded. The command list is + # initialized earlier in this function + # to a list and it's still a list at + # this point in the function. There is + # no opportunity for an attacker to + # attempt command injection via string + # parsing. 
+ stderr=subprocess.STDOUT, + ) except Exception as e: msg = 'Error when signing assertion, reason: %(reason)s%(output)s' - LOG.error(msg, - {'reason': e, - 'output': ' ' + e.output if hasattr(e, 'output') else ''}) + LOG.error( + msg, + { + 'reason': e, + 'output': ' ' + e.output if hasattr(e, 'output') else '', + }, + ) raise exception.SAMLSigningError(reason=e) finally: try: @@ -544,8 +585,7 @@ class MetadataGenerator(object): """ self._ensure_required_values_present() entity_descriptor = self._create_entity_descriptor() - entity_descriptor.idpsso_descriptor = ( - self._create_idp_sso_descriptor()) + entity_descriptor.idpsso_descriptor = self._create_idp_sso_descriptor() return entity_descriptor def _create_entity_descriptor(self): @@ -559,12 +599,14 @@ class MetadataGenerator(object): try: return sigver.read_cert_from_file(CONF.saml.certfile, 'pem') except (IOError, sigver.CertificateError) as e: - msg = ('Cannot open certificate %(cert_file)s.' - 'Reason: %(reason)s') % { - 'cert_file': CONF.saml.certfile, 'reason': e} - tr_msg = _('Cannot open certificate %(cert_file)s.' - 'Reason: %(reason)s') % { - 'cert_file': CONF.saml.certfile, 'reason': e} + msg = ( + 'Cannot open certificate %(cert_file)s.' + 'Reason: %(reason)s' + ) % {'cert_file': CONF.saml.certfile, 'reason': e} + tr_msg = _( + 'Cannot open certificate %(cert_file)s.' 
+ 'Reason: %(reason)s' + ) % {'cert_file': CONF.saml.certfile, 'reason': e} LOG.error(msg) raise IOError(tr_msg) @@ -575,27 +617,33 @@ class MetadataGenerator(object): x509_data=xmldsig.X509Data( x509_certificate=xmldsig.X509Certificate(text=cert) ) - ), use='signing' + ), + use='signing', ) def single_sign_on_service(): idp_sso_endpoint = CONF.saml.idp_sso_endpoint return md.SingleSignOnService( - binding=saml2.BINDING_URI, - location=idp_sso_endpoint) + binding=saml2.BINDING_URI, location=idp_sso_endpoint + ) def organization(): - name = md.OrganizationName(lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_name) + name = md.OrganizationName( + lang=CONF.saml.idp_lang, text=CONF.saml.idp_organization_name + ) display_name = md.OrganizationDisplayName( lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_display_name) - url = md.OrganizationURL(lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_url) + text=CONF.saml.idp_organization_display_name, + ) + url = md.OrganizationURL( + lang=CONF.saml.idp_lang, text=CONF.saml.idp_organization_url + ) return md.Organization( organization_display_name=display_name, - organization_url=url, organization_name=name) + organization_url=url, + organization_name=name, + ) def contact_person(): company = md.Company(text=CONF.saml.idp_contact_company) @@ -603,13 +651,18 @@ class MetadataGenerator(object): surname = md.SurName(text=CONF.saml.idp_contact_surname) email = md.EmailAddress(text=CONF.saml.idp_contact_email) telephone = md.TelephoneNumber( - text=CONF.saml.idp_contact_telephone) + text=CONF.saml.idp_contact_telephone + ) contact_type = CONF.saml.idp_contact_type return md.ContactPerson( - company=company, given_name=given_name, sur_name=surname, - email_address=email, telephone_number=telephone, - contact_type=contact_type) + company=company, + given_name=given_name, + sur_name=surname, + email_address=email, + telephone_number=telephone, + contact_type=contact_type, + ) def name_id_format(): return 
md.NameIDFormat(text=saml.NAMEID_FORMAT_TRANSIENT) @@ -637,12 +690,14 @@ class MetadataGenerator(object): def _check_contact_person_values(self): """Determine if contact information is included in metadata.""" # Check if we should include contact information - params = [CONF.saml.idp_contact_company, - CONF.saml.idp_contact_name, - CONF.saml.idp_contact_surname, - CONF.saml.idp_contact_email, - CONF.saml.idp_contact_telephone, - CONF.saml.idp_contact_type] + params = [ + CONF.saml.idp_contact_company, + CONF.saml.idp_contact_name, + CONF.saml.idp_contact_surname, + CONF.saml.idp_contact_email, + CONF.saml.idp_contact_telephone, + CONF.saml.idp_contact_type, + ] for value in params: if value is None: return False @@ -651,9 +706,11 @@ class MetadataGenerator(object): def _check_organization_values(self): """Determine if organization information is included in metadata.""" - params = [CONF.saml.idp_organization_name, - CONF.saml.idp_organization_display_name, - CONF.saml.idp_organization_url] + params = [ + CONF.saml.idp_organization_name, + CONF.saml.idp_organization_display_name, + CONF.saml.idp_organization_url, + ] for value in params: if value is None: return False @@ -673,16 +730,18 @@ class ECPGenerator(object): def _create_header(self, relay_state_prefix): relay_state_text = relay_state_prefix + uuid.uuid4().hex - relay_state = ecp.RelayState(actor=client_base.ACTOR, - must_understand='1', - text=relay_state_text) + relay_state = ecp.RelayState( + actor=client_base.ACTOR, must_understand='1', text=relay_state_text + ) header = soapenv.Header() - header.extension_elements = ( - [saml2.element_to_extension_element(relay_state)]) + header.extension_elements = [ + saml2.element_to_extension_element(relay_state) + ] return header def _create_body(self, saml_assertion): body = soapenv.Body() - body.extension_elements = ( - [saml2.element_to_extension_element(saml_assertion)]) + body.extension_elements = [ + saml2.element_to_extension_element(saml_assertion) + ] 
return body diff --git a/keystone/federation/schema.py b/keystone/federation/schema.py index 9e9ee51488..41a738de23 100644 --- a/keystone/federation/schema.py +++ b/keystone/federation/schema.py @@ -16,13 +16,9 @@ from keystone.common.validation import parameter_types basic_property_id = { 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - } - }, + 'properties': {'id': {'type': 'string'}}, 'required': ['id'], - 'additionalProperties': False + 'additionalProperties': False, } saml_create = { @@ -32,24 +28,20 @@ saml_create = { 'type': 'object', 'properties': { 'token': basic_property_id, - 'methods': { - 'type': 'array' - } + 'methods': {'type': 'array'}, }, 'required': ['token'], - 'additionalProperties': False + 'additionalProperties': False, }, 'scope': { 'type': 'object', - 'properties': { - 'service_provider': basic_property_id - }, + 'properties': {'service_provider': basic_property_id}, 'required': ['service_provider'], - 'additionalProperties': False + 'additionalProperties': False, }, }, 'required': ['identity', 'scope'], - 'additionalProperties': False + 'additionalProperties': False, } _service_provider_properties = { @@ -59,7 +51,7 @@ _service_provider_properties = { 'sp_url': parameter_types.url, 'description': validation.nullable(parameter_types.description), 'enabled': parameter_types.boolean, - 'relay_state_prefix': validation.nullable(parameter_types.description) + 'relay_state_prefix': validation.nullable(parameter_types.description), } service_provider_create = { @@ -67,7 +59,7 @@ service_provider_create = { 'properties': _service_provider_properties, # NOTE(rodrigods): 'id' is not required since it is passed in the URL 'required': ['auth_url', 'sp_url'], - 'additionalProperties': False + 'additionalProperties': False, } service_provider_update = { @@ -75,7 +67,7 @@ service_provider_update = { 'properties': _service_provider_properties, # Make sure at least one property is being updated 'minProperties': 1, - 'additionalProperties': 
False + 'additionalProperties': False, } _identity_provider_properties_create = { @@ -85,11 +77,9 @@ _identity_provider_properties_create = { 'authorization_ttl': validation.nullable(parameter_types.integer_min0), 'remote_ids': { 'type': ['array', 'null'], - 'items': { - 'type': 'string' - }, - 'uniqueItems': True - } + 'items': {'type': 'string'}, + 'uniqueItems': True, + }, } _identity_provider_properties_update = { @@ -98,17 +88,15 @@ _identity_provider_properties_update = { 'authorization_ttl': validation.nullable(parameter_types.integer_min0), 'remote_ids': { 'type': ['array', 'null'], - 'items': { - 'type': 'string' - }, - 'uniqueItems': True - } + 'items': {'type': 'string'}, + 'uniqueItems': True, + }, } identity_provider_create = { 'type': 'object', 'properties': _identity_provider_properties_create, - 'additionalProperties': False + 'additionalProperties': False, } identity_provider_update = { @@ -116,7 +104,7 @@ identity_provider_update = { 'properties': _identity_provider_properties_update, # Make sure at least one property is being updated 'minProperties': 1, - 'additionalProperties': False + 'additionalProperties': False, } _remote_id_attribute_properties = { @@ -126,19 +114,19 @@ _remote_id_attribute_properties = { _protocol_properties = { 'mapping_id': parameter_types.mapping_id_string, - 'remote_id_attribute': _remote_id_attribute_properties + 'remote_id_attribute': _remote_id_attribute_properties, } protocol_create = { 'type': 'object', 'properties': _protocol_properties, 'required': ['mapping_id'], - 'additionalProperties': False + 'additionalProperties': False, } protocol_update = { 'type': 'object', 'properties': _protocol_properties, 'minProperties': 1, - 'additionalProperties': False + 'additionalProperties': False, } diff --git a/keystone/federation/utils.py b/keystone/federation/utils.py index dc983a6e9d..ca0e7c56e1 100644 --- a/keystone/federation/utils.py +++ b/keystone/federation/utils.py @@ -47,12 +47,10 @@ ROLE_PROPERTIES = { "type": 
"object", "required": ["name"], "properties": { - "name": { - "type": "string" - }, + "name": {"type": "string"}, }, - "additionalProperties": False - } + "additionalProperties": False, + }, } PROJECTS_SCHEMA = { @@ -61,11 +59,8 @@ PROJECTS_SCHEMA = { "type": "object", "required": ["name", "roles"], "additionalProperties": False, - "properties": { - "name": {"type": "string"}, - "roles": ROLE_PROPERTIES - } - } + "properties": {"name": {"type": "string"}, "roles": ROLE_PROPERTIES}, + }, } IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0 = { @@ -97,29 +92,29 @@ IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0 = { }, "type": { "type": "string", - "enum": [UserType.EPHEMERAL, - UserType.LOCAL] - } + "enum": [ + UserType.EPHEMERAL, + UserType.LOCAL, + ], + }, }, - "additionalProperties": False + "additionalProperties": False, }, "projects": PROJECTS_SCHEMA, "group": { "type": "object", "oneOf": [ {"$ref": "#/definitions/group_by_id"}, - {"$ref": "#/definitions/group_by_name"} - ] - }, - "groups": { - "type": "string" - }, - "group_ids": { - "type": "string" + { + "$ref": "#/definitions/group_by_name" + }, + ], }, + "groups": {"type": "string"}, + "group_ids": {"type": "string"}, "domain": {"$ref": "#/definitions/domain"}, - } - } + }, + }, }, "remote": { "minItems": 1, @@ -131,25 +126,21 @@ IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0 = { {"$ref": "#/definitions/any_one_of"}, {"$ref": "#/definitions/not_any_of"}, {"$ref": "#/definitions/blacklist"}, - {"$ref": "#/definitions/whitelist"} + {"$ref": "#/definitions/whitelist"}, ], - } - } - } - } + }, + }, + }, + }, }, - "schema_version": { - "name": {"type": "string"} - } + "schema_version": {"name": {"type": "string"}}, }, "definitions": { "empty": { "type": "object", "required": ['type'], "properties": { - "type": { - "type": "string" - }, + "type": {"type": "string"}, }, "additionalProperties": False, }, @@ -158,104 +149,81 @@ IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0 = { "additionalProperties": False, "required": ['type', 'any_one_of'], "properties": { - "type": { - 
"type": "string" - }, - "any_one_of": { - "type": "array" - }, - "regex": { - "type": "boolean" - } - } + "type": {"type": "string"}, + "any_one_of": {"type": "array"}, + "regex": {"type": "boolean"}, + }, }, "not_any_of": { "type": "object", "additionalProperties": False, "required": ['type', 'not_any_of'], "properties": { - "type": { - "type": "string" - }, - "not_any_of": { - "type": "array" - }, - "regex": { - "type": "boolean" - } - } + "type": {"type": "string"}, + "not_any_of": {"type": "array"}, + "regex": {"type": "boolean"}, + }, }, "blacklist": { "type": "object", "additionalProperties": False, "required": ['type', 'blacklist'], "properties": { - "type": { - "type": "string" - }, - "blacklist": { - "type": "array" - }, - "regex": { - "type": "boolean" - } - } + "type": {"type": "string"}, + "blacklist": {"type": "array"}, + "regex": {"type": "boolean"}, + }, }, "whitelist": { "type": "object", "additionalProperties": False, "required": ['type', 'whitelist'], "properties": { - "type": { - "type": "string" - }, - "whitelist": { - "type": "array" - }, - "regex": { - "type": "boolean" - } - } + "type": {"type": "string"}, + "whitelist": {"type": "array"}, + "regex": {"type": "boolean"}, + }, }, "domain": { "type": "object", "properties": { "id": {"type": "string"}, - "name": {"type": "string"} + "name": {"type": "string"}, }, - "additionalProperties": False + "additionalProperties": False, }, "group_by_id": { "type": "object", - "properties": { - "id": {"type": "string"} - }, + "properties": {"id": {"type": "string"}}, "additionalProperties": False, - "required": ["id"] + "required": ["id"], }, "group_by_name": { "type": "object", "properties": { "name": {"type": "string"}, - "domain": {"$ref": "#/definitions/domain"} + "domain": {"$ref": "#/definitions/domain"}, }, "additionalProperties": False, - "required": ["name", "domain"] - } - } + "required": ["name", "domain"], + }, + }, } # `IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0` adds the domain option for projects, # 
the goal is to work in a similar fashion as `user` and `groups` properties IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0 = copy.deepcopy( - IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0) + IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0 +) PROJECTS_SCHEMA_2_0 = copy.deepcopy(PROJECTS_SCHEMA) -PROJECTS_SCHEMA_2_0["items"]["properties"][ - "domain"] = {"$ref": "#/definitions/domain"} +PROJECTS_SCHEMA_2_0["items"]["properties"]["domain"] = { + "$ref": "#/definitions/domain" +} IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0['properties']['rules']['items']['properties'][ - 'local']['items']['properties']['projects'] = PROJECTS_SCHEMA_2_0 + 'local' +]['items']['properties']['projects'] = PROJECTS_SCHEMA_2_0 def get_default_attribute_mapping_schema_version(): @@ -294,13 +262,18 @@ class DirectMaps(object): def validate_mapping_structure(ref): version = ref.get( - 'schema_version', get_default_attribute_mapping_schema_version()) + 'schema_version', get_default_attribute_mapping_schema_version() + ) - LOG.debug("Validating mapping [%s] using validator from version [%s].", - ref, version) + LOG.debug( + "Validating mapping [%s] using validator from version [%s].", + ref, + version, + ) v = jsonschema.Draft4Validator( - IDP_ATTRIBUTE_MAPPING_SCHEMAS[version]['schema']) + IDP_ATTRIBUTE_MAPPING_SCHEMAS[version]['schema'] + ) messages = '' for error in sorted(v.iter_errors(ref), key=str): @@ -334,8 +307,9 @@ def get_remote_id_parameter(idp, protocol): remote_id_parameter = CONF[protocol]['remote_id_attribute'] except AttributeError: # TODO(dolph): Move configuration registration to keystone.conf - CONF.register_opt(cfg.StrOpt('remote_id_attribute'), - group=protocol) + CONF.register_opt( + cfg.StrOpt('remote_id_attribute'), group=protocol + ) try: remote_id_parameter = CONF[protocol]['remote_id_attribute'] except AttributeError: # nosec @@ -343,9 +317,12 @@ def get_remote_id_parameter(idp, protocol): # instead. pass if not remote_id_parameter: - LOG.debug('Cannot find "remote_id_attribute" in configuration ' - 'group %s. 
Trying default location in ' - 'group federation.', protocol) + LOG.debug( + 'Cannot find "remote_id_attribute" in configuration ' + 'group %s. Trying default location in ' + 'group federation.', + protocol, + ) remote_id_parameter = CONF.federation.remote_id_attribute return remote_id_parameter @@ -362,12 +339,15 @@ def validate_idp(idp, protocol, assertion): try: idp_remote_identifier = assertion[remote_id_parameter] except KeyError: - msg = _('Could not find Identity Provider identifier in ' - 'environment') + msg = _( + 'Could not find Identity Provider identifier in ' 'environment' + ) raise exception.ValidationError(msg) if idp_remote_identifier not in idp['remote_ids']: - msg = _('Incoming identity provider identifier not included ' - 'among the accepted identifiers.') + msg = _( + 'Incoming identity provider identifier not included ' + 'among the accepted identifiers.' + ) raise exception.Forbidden(msg) @@ -394,13 +374,15 @@ def validate_mapped_group_ids(group_ids, mapping_id, identity_api): identity_api.get_group(group_id) except exception.GroupNotFound: raise exception.MappedGroupNotFound( - group_id=group_id, mapping_id=mapping_id) + group_id=group_id, mapping_id=mapping_id + ) # TODO(marek-denis): Optimize this function, so the number of calls to the # backend are minimized. -def transform_to_group_ids(group_names, mapping_id, - identity_api, resource_api): +def transform_to_group_ids( + group_names, mapping_id, identity_api, resource_api +): """Transform groups identified by name/domain to their ids. Function accepts list of groups identified by a name and domain giving @@ -437,6 +419,7 @@ def transform_to_group_ids(group_names, mapping_id, :returns: generator object with group ids """ + def resolve_domain(domain): """Return domain id. 
@@ -448,19 +431,19 @@ def transform_to_group_ids(group_names, mapping_id, :rtype: str """ - domain_id = (domain.get('id') or - resource_api.get_domain_by_name( - domain.get('name')).get('id')) + domain_id = domain.get('id') or resource_api.get_domain_by_name( + domain.get('name') + ).get('id') return domain_id for group in group_names: try: group_dict = identity_api.get_group_by_name( - group['name'], resolve_domain(group['domain'])) + group['name'], resolve_domain(group['domain']) + ) yield group_dict['id'] except exception.GroupNotFound: - LOG.debug('Group %s has no entry in the backend', - group['name']) + LOG.debug('Group %s has no entry in the backend', group['name']) def get_assertion_params_from_env(): @@ -560,15 +543,19 @@ class RuleProcessor(object): # This will create a new dictionary where the values are arrays, and # any multiple values are stored in the arrays. LOG.debug('assertion data: %s', assertion_data) - assertion = {n: v.split(';') for n, v in assertion_data.items() - if isinstance(v, str)} + assertion = { + n: v.split(';') + for n, v in assertion_data.items() + if isinstance(v, str) + } LOG.debug('assertion: %s', assertion) identity_values = [] LOG.debug('rules: %s', self.rules) for rule in self.rules: - direct_maps = self._verify_all_requirements(rule['remote'], - assertion) + direct_maps = self._verify_all_requirements( + rule['remote'], assertion + ) # If the compare comes back as None, then the rule did not apply # to the assertion data, go on to the next rule @@ -618,23 +605,27 @@ class RuleProcessor(object): group_dicts = [convert_json(g) for g in group_names_list] for g in group_dicts: if 'domain' not in g: - msg = _("Invalid rule: %(identity_value)s. Both " - "'groups' and 'domain' keywords must be " - "specified.") + msg = _( + "Invalid rule: %(identity_value)s. Both " + "'groups' and 'domain' keywords must be " + "specified." 
+ ) msg = msg % {'identity_value': identity_value} raise exception.ValidationError(msg) else: if 'domain' not in identity_value: - msg = _("Invalid rule: %(identity_value)s. Both " - "'groups' and 'domain' keywords must be " - "specified.") + msg = _( + "Invalid rule: %(identity_value)s. Both " + "'groups' and 'domain' keywords must be " + "specified." + ) msg = msg % {'identity_value': identity_value} raise exception.ValidationError(msg) - group_names_list = self._ast_literal_eval( - identity_value['groups']) + group_names_list = self._ast_literal_eval(identity_value['groups']) domain = identity_value['domain'] - group_dicts = [{'name': name, 'domain': domain} for name in - group_names_list] + group_dicts = [ + {'name': name, 'domain': domain} for name in group_names_list + ] return group_dicts def normalize_user(self, user, default_mapping_domain): @@ -694,12 +685,16 @@ class RuleProcessor(object): # if mapping yield no valid identity values, we should bail right away # instead of continuing on with a normalized bogus user if not identity_values: - msg = ("Could not map any federated user properties to identity " - "values. Check debug logs or the mapping used for " - "additional details.") - tr_msg = _("Could not map any federated user properties to " - "identity values. Check debug logs or the mapping " - "used for additional details.") + msg = ( + "Could not map any federated user properties to identity " + "values. Check debug logs or the mapping used for " + "additional details." + ) + tr_msg = _( + "Could not map any federated user properties to " + "identity values. Check debug logs or the mapping " + "used for additional details." 
+ ) LOG.warning(msg) raise exception.ValidationError(tr_msg) @@ -707,8 +702,9 @@ class RuleProcessor(object): if 'user' in identity_value: # if a mapping outputs more than one user name, log it if user: - LOG.warning('Ignoring user [%s]', - identity_value.get('user')) + LOG.warning( + 'Ignoring user [%s]', identity_value.get('user') + ) else: user = identity_value.get('user') @@ -718,7 +714,8 @@ class RuleProcessor(object): group_ids.add(group['id']) elif 'name' in group: groups = self.process_group_by_name( - group, groups_by_domain) + group, groups_by_domain + ) group_names.extend(groups) if 'groups' in identity_value: group_dicts = self._normalize_groups(identity_value) @@ -730,20 +727,23 @@ class RuleProcessor(object): # parsed as a simple string, and not a list or the # representation of a list. group_ids.update( - self._ast_literal_eval(identity_value['group_ids'])) + self._ast_literal_eval(identity_value['group_ids']) + ) if 'projects' in identity_value: projects = self.extract_projects(identity_value) self.normalize_user(user, identity_value.get('domain')) - return {'user': user, - 'group_ids': list(group_ids), - 'group_names': group_names, - 'projects': projects} + return { + 'user': user, + 'group_ids': list(group_ids), + 'group_names': group_names, + 'projects': projects, + } def process_group_by_name(self, group, groups_by_domain): - domain = (group['domain'].get('name') or group['domain'].get('id')) + domain = group['domain'].get('name') or group['domain'].get('id') groups_by_domain.setdefault(domain, list()).append(group) return self.extract_groups(groups_by_domain) @@ -783,14 +783,16 @@ class RuleProcessor(object): if isinstance(v, dict): new_value = self._update_local_mapping(v, direct_maps) elif isinstance(v, list): - new_value = [self._update_local_mapping(item, direct_maps) - for item in v] + new_value = [ + self._update_local_mapping(item, direct_maps) for item in v + ] else: try: new_value = v.format(*direct_maps) except IndexError: raise 
exception.DirectMappingError( - mapping_id=self.mapping_id) + mapping_id=self.mapping_id + ) new[k] = new_value return new @@ -859,20 +861,24 @@ class RuleProcessor(object): any_one_values = requirement.get(self._EvalType.ANY_ONE_OF) if any_one_values is not None: - if self._evaluate_requirement(any_one_values, - direct_map_values, - self._EvalType.ANY_ONE_OF, - regex): + if self._evaluate_requirement( + any_one_values, + direct_map_values, + self._EvalType.ANY_ONE_OF, + regex, + ): continue else: return None not_any_values = requirement.get(self._EvalType.NOT_ANY_OF) if not_any_values is not None: - if self._evaluate_requirement(not_any_values, - direct_map_values, - self._EvalType.NOT_ANY_OF, - regex): + if self._evaluate_requirement( + not_any_values, + direct_map_values, + self._EvalType.NOT_ANY_OF, + regex, + ): continue else: return None @@ -886,17 +892,19 @@ class RuleProcessor(object): # If a blacklist or whitelist is used, we want to map to the # whole list instead of just its values separately. 
if blacklisted_values is not None: - direct_map_values = ( - self._evaluate_requirement(blacklisted_values, - direct_map_values, - self._EvalType.BLACKLIST, - regex)) + direct_map_values = self._evaluate_requirement( + blacklisted_values, + direct_map_values, + self._EvalType.BLACKLIST, + regex, + ) elif whitelisted_values is not None: - direct_map_values = ( - self._evaluate_requirement(whitelisted_values, - direct_map_values, - self._EvalType.WHITELIST, - regex)) + direct_map_values = self._evaluate_requirement( + whitelisted_values, + direct_map_values, + self._EvalType.WHITELIST, + regex, + ) direct_maps.add(direct_map_values) @@ -906,12 +914,14 @@ class RuleProcessor(object): def _evaluate_values_by_regex(self, values, assertion_values): return [ - assertion for assertion in assertion_values + assertion + for assertion in assertion_values if any([re.search(regex, assertion) for regex in values]) ] - def _evaluate_requirement(self, values, assertion_values, - eval_type, regex): + def _evaluate_requirement( + self, values, assertion_values, eval_type, regex + ): """Evaluate the incoming requirement and assertion. Filter the incoming assertions against the requirement values. 
If regex @@ -956,17 +966,16 @@ class RuleProcessor(object): return list(matches) else: raise exception.UnexpectedError( - _('Unexpected evaluation type "%(eval_type)s"') % { - 'eval_type': eval_type}) + _('Unexpected evaluation type "%(eval_type)s"') + % {'eval_type': eval_type} + ) def assert_enabled_identity_provider(federation_api, idp_id): identity_provider = federation_api.get_idp(idp_id) if identity_provider.get('enabled') is not True: - msg = 'Identity Provider %(idp)s is disabled' % { - 'idp': idp_id} - tr_msg = _('Identity Provider %(idp)s is disabled') % { - 'idp': idp_id} + msg = 'Identity Provider %(idp)s is disabled' % {'idp': idp_id} + tr_msg = _('Identity Provider %(idp)s is disabled') % {'idp': idp_id} LOG.debug(msg) raise exception.Forbidden(tr_msg) @@ -993,41 +1002,58 @@ class RuleProcessorToHonorDomainOption(RuleProcessor): def __init__(self, mapping_id, rules): super(RuleProcessorToHonorDomainOption, self).__init__( - mapping_id, rules) + mapping_id, rules + ) def extract_projects(self, identity_value): projects = identity_value.get("projects", []) default_mapping_domain = identity_value.get("domain") for project in projects: if not project.get("domain"): - LOG.debug("Configuring the domain [%s] for project [%s].", - default_mapping_domain, project) + LOG.debug( + "Configuring the domain [%s] for project [%s].", + default_mapping_domain, + project, + ) project["domain"] = default_mapping_domain return projects def normalize_user(self, user, default_mapping_domain): super(RuleProcessorToHonorDomainOption, self).normalize_user( - user, default_mapping_domain) + user, default_mapping_domain + ) if not user.get("domain"): - LOG.debug("Configuring the domain [%s] for user [%s].", - default_mapping_domain, user) + LOG.debug( + "Configuring the domain [%s] for user [%s].", + default_mapping_domain, + user, + ) user["domain"] = default_mapping_domain else: - LOG.debug("The user [%s] was configured with a domain. 
" - "Therefore, we do not need to define.", user) + LOG.debug( + "The user [%s] was configured with a domain. " + "Therefore, we do not need to define.", + user, + ) IDP_ATTRIBUTE_MAPPING_SCHEMAS = { - "1.0": {"schema": IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0, - "processor": RuleProcessor}, - "2.0": {"schema": IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0, - "processor": RuleProcessorToHonorDomainOption} + "1.0": { + "schema": IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0, + "processor": RuleProcessor, + }, + "2.0": { + "schema": IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0, + "processor": RuleProcessorToHonorDomainOption, + }, } def create_attribute_mapping_rules_processor(mapping): version = mapping.get( - 'schema_version', get_default_attribute_mapping_schema_version()) + 'schema_version', get_default_attribute_mapping_schema_version() + ) return IDP_ATTRIBUTE_MAPPING_SCHEMAS[version]['processor']( - mapping['id'], mapping['rules']) + mapping['id'], mapping['rules'] + ) diff --git a/keystone/identity/backends/base.py b/keystone/identity/backends/base.py index 666540cc8b..057a5252e8 100644 --- a/keystone/identity/backends/base.py +++ b/keystone/identity/backends/base.py @@ -154,8 +154,12 @@ class IdentityDriverBase(object, metaclass=abc.ABCMeta): conf = self._get_conf() # use list_limit from domain-specific config. 
If list_limit in # domain-specific config is not set, look it up in the default config - return (conf.identity.list_limit or conf.list_limit or - CONF.identity.list_limit or CONF.list_limit) + return ( + conf.identity.list_limit + or conf.list_limit + or CONF.identity.list_limit + or CONF.list_limit + ) def is_domain_aware(self): """Indicate if the driver supports domains.""" @@ -168,8 +172,10 @@ class IdentityDriverBase(object, metaclass=abc.ABCMeta): @property def multiple_domains_supported(self): - return (self.is_domain_aware() or - CONF.identity.domain_specific_drivers_enabled) + return ( + self.is_domain_aware() + or CONF.identity.domain_specific_drivers_enabled + ) def generates_uuids(self): """Indicate if Driver generates UUIDs as the local entity ID.""" diff --git a/keystone/identity/backends/ldap/common.py b/keystone/identity/backends/ldap/common.py index 1a2cfa5db2..21ffbde5a9 100644 --- a/keystone/identity/backends/ldap/common.py +++ b/keystone/identity/backends/ldap/common.py @@ -35,16 +35,19 @@ from keystone.i18n import _ LOG = log.getLogger(__name__) LDAP_VALUES = {'TRUE': True, 'FALSE': False} -LDAP_SCOPES = {'one': ldap.SCOPE_ONELEVEL, - 'sub': ldap.SCOPE_SUBTREE} -LDAP_DEREF = {'always': ldap.DEREF_ALWAYS, - 'default': None, - 'finding': ldap.DEREF_FINDING, - 'never': ldap.DEREF_NEVER, - 'searching': ldap.DEREF_SEARCHING} -LDAP_TLS_CERTS = {'never': ldap.OPT_X_TLS_NEVER, - 'demand': ldap.OPT_X_TLS_DEMAND, - 'allow': ldap.OPT_X_TLS_ALLOW} +LDAP_SCOPES = {'one': ldap.SCOPE_ONELEVEL, 'sub': ldap.SCOPE_SUBTREE} +LDAP_DEREF = { + 'always': ldap.DEREF_ALWAYS, + 'default': None, + 'finding': ldap.DEREF_FINDING, + 'never': ldap.DEREF_NEVER, + 'searching': ldap.DEREF_SEARCHING, +} +LDAP_TLS_CERTS = { + 'never': ldap.OPT_X_TLS_NEVER, + 'demand': ldap.OPT_X_TLS_DEMAND, + 'allow': ldap.OPT_X_TLS_ALLOW, +} # RFC 4511 (The LDAP Protocol) defines a list containing only the OID '1.1' to @@ -74,9 +77,9 @@ def utf8_encode(value): return value else: value_cls_name = 
reflection.get_class_name( - value, fully_qualified=False) - raise TypeError("value must be basestring, " - "not %s" % value_cls_name) + value, fully_qualified=False + ) + raise TypeError("value must be basestring, " "not %s" % value_cls_name) _utf8_decoder = codecs.getdecoder('utf-8') @@ -187,8 +190,10 @@ def convert_ldap_result(ldap_result): LOG.debug('Unable to decode value for attribute %s', kind) py_result.append((dn, ldap_attrs)) if at_least_one_referral: - LOG.debug('Referrals were returned and ignored. Enable referral ' - 'chasing in keystone.conf via [ldap] chase_referrals') + LOG.debug( + 'Referrals were returned and ignored. Enable referral ' + 'chasing in keystone.conf via [ldap] chase_referrals' + ) return py_result @@ -207,21 +212,29 @@ def parse_deref(opt): try: return LDAP_DEREF[opt] except KeyError: - raise ValueError(_('Invalid LDAP deref option: %(option)s. ' - 'Choose one of: %(options)s') % - {'option': opt, - 'options': ', '.join(LDAP_DEREF.keys()), }) + raise ValueError( + _( + 'Invalid LDAP deref option: %(option)s. ' + 'Choose one of: %(options)s' + ) + % { + 'option': opt, + 'options': ', '.join(LDAP_DEREF.keys()), + } + ) def parse_tls_cert(opt): try: return LDAP_TLS_CERTS[opt] except KeyError: - raise ValueError(_( - 'Invalid LDAP TLS certs option: %(option)s. ' - 'Choose one of: %(options)s') % { - 'option': opt, - 'options': ', '.join(LDAP_TLS_CERTS.keys())}) + raise ValueError( + _( + 'Invalid LDAP TLS certs option: %(option)s. ' + 'Choose one of: %(options)s' + ) + % {'option': opt, 'options': ', '.join(LDAP_TLS_CERTS.keys())} + ) def ldap_scope(scope): @@ -229,9 +242,9 @@ def ldap_scope(scope): return LDAP_SCOPES[scope] except KeyError: raise ValueError( - _('Invalid LDAP scope: %(scope)s. Choose one of: %(options)s') % { - 'scope': scope, - 'options': ', '.join(LDAP_SCOPES.keys())}) + _('Invalid LDAP scope: %(scope)s. 
Choose one of: %(options)s') + % {'scope': scope, 'options': ', '.join(LDAP_SCOPES.keys())} + ) def prep_case_insensitive(value): @@ -337,7 +350,7 @@ def dn_startswith(descendant_dn, dn): return False # Use the last len(dn) RDNs. - return is_dn_equal(descendant_dn[-len(dn):], dn) + return is_dn_equal(descendant_dn[-len(dn) :], dn) class LDAPHandler(object, metaclass=abc.ABCMeta): @@ -441,12 +454,25 @@ class LDAPHandler(object, metaclass=abc.ABCMeta): self.conn = conn @abc.abstractmethod - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert=ldap.OPT_X_TLS_DEMAND, chase_referrals=None, - debug_level=None, conn_timeout=None, use_pool=None, - pool_size=None, pool_retry_max=None, pool_retry_delay=None, - pool_conn_timeout=None, pool_conn_lifetime=None): + def connect( + self, + url, + page_size=0, + alias_dereferencing=None, + use_tls=False, + tls_cacertfile=None, + tls_cacertdir=None, + tls_req_cert=ldap.OPT_X_TLS_DEMAND, + chase_referrals=None, + debug_level=None, + conn_timeout=None, + use_pool=None, + pool_size=None, + pool_retry_max=None, + pool_retry_delay=None, + pool_conn_timeout=None, + pool_conn_lifetime=None, + ): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod @@ -458,8 +484,9 @@ class LDAPHandler(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def simple_bind_s(self, who='', cred='', - serverctrls=None, clientctrls=None): + def simple_bind_s( + self, who='', cred='', serverctrls=None, clientctrls=None + ): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod @@ -471,20 +498,35 @@ class LDAPHandler(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def search_s(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): + def search_s( + self, + base, + scope, + filterstr='(objectClass=*)', + 
attrlist=None, + attrsonly=0, + ): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): + def search_ext( + self, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + serverctrls=None, + clientctrls=None, + timeout=-1, + sizelimit=0, + ): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None, - resp_ctrl_classes=None): + def result3( + self, msgid=ldap.RES_ANY, all=1, timeout=None, resp_ctrl_classes=None + ): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod @@ -508,20 +550,35 @@ class PythonLDAPHandler(LDAPHandler): """ - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert=ldap.OPT_X_TLS_DEMAND, chase_referrals=None, - debug_level=None, conn_timeout=None, use_pool=None, - pool_size=None, pool_retry_max=None, pool_retry_delay=None, - pool_conn_timeout=None, pool_conn_lifetime=None): + def connect( + self, + url, + page_size=0, + alias_dereferencing=None, + use_tls=False, + tls_cacertfile=None, + tls_cacertdir=None, + tls_req_cert=ldap.OPT_X_TLS_DEMAND, + chase_referrals=None, + debug_level=None, + conn_timeout=None, + use_pool=None, + pool_size=None, + pool_retry_max=None, + pool_retry_delay=None, + pool_conn_timeout=None, + pool_conn_lifetime=None, + ): - _common_ldap_initialization(url=url, - use_tls=use_tls, - tls_cacertfile=tls_cacertfile, - tls_cacertdir=tls_cacertdir, - tls_req_cert=tls_req_cert, - debug_level=debug_level, - timeout=conn_timeout) + _common_ldap_initialization( + url=url, + use_tls=use_tls, + tls_cacertfile=tls_cacertfile, + tls_cacertdir=tls_cacertdir, + tls_req_cert=tls_req_cert, + debug_level=debug_level, + timeout=conn_timeout, + ) self.conn = 
ldap.initialize(url) self.conn.protocol_version = ldap.VERSION3 @@ -542,8 +599,9 @@ class PythonLDAPHandler(LDAPHandler): def get_option(self, option): return self.conn.get_option(option) - def simple_bind_s(self, who='', cred='', - serverctrls=None, clientctrls=None): + def simple_bind_s( + self, who='', cred='', serverctrls=None, clientctrls=None + ): return self.conn.simple_bind_s(who, cred, serverctrls, clientctrls) def unbind_s(self): @@ -552,22 +610,43 @@ class PythonLDAPHandler(LDAPHandler): def add_s(self, dn, modlist): return self.conn.add_s(dn, modlist) - def search_s(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): - return self.conn.search_s(base, scope, filterstr, - attrlist, attrsonly) + def search_s( + self, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + ): + return self.conn.search_s(base, scope, filterstr, attrlist, attrsonly) - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): - return self.conn.search_ext(base, scope, - filterstr, attrlist, attrsonly, - serverctrls, clientctrls, - timeout, sizelimit) + def search_ext( + self, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + serverctrls=None, + clientctrls=None, + timeout=-1, + sizelimit=0, + ): + return self.conn.search_ext( + base, + scope, + filterstr, + attrlist, + attrsonly, + serverctrls, + clientctrls, + timeout, + sizelimit, + ) - def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None, - resp_ctrl_classes=None): + def result3( + self, msgid=ldap.RES_ANY, all=1, timeout=None, resp_ctrl_classes=None + ): # The resp_ctrl_classes parameter is a recent addition to the # API. It defaults to None. We do not anticipate using it. # To run with older versions of python-ldap we do not pass it. 
@@ -577,15 +656,26 @@ class PythonLDAPHandler(LDAPHandler): return self.conn.modify_s(dn, modlist) -def _common_ldap_initialization(url, use_tls=False, tls_cacertfile=None, - tls_cacertdir=None, tls_req_cert=None, - debug_level=None, timeout=None): +def _common_ldap_initialization( + url, + use_tls=False, + tls_cacertfile=None, + tls_cacertdir=None, + tls_req_cert=None, + debug_level=None, + timeout=None, +): """LDAP initialization for PythonLDAPHandler and PooledLDAPHandler.""" LOG.debug('LDAP init: url=%s', url) - LOG.debug('LDAP init: use_tls=%s tls_cacertfile=%s tls_cacertdir=%s ' - 'tls_req_cert=%s tls_avail=%s', - use_tls, tls_cacertfile, tls_cacertdir, - tls_req_cert, ldap.TLS_AVAIL) + LOG.debug( + 'LDAP init: use_tls=%s tls_cacertfile=%s tls_cacertdir=%s ' + 'tls_req_cert=%s tls_avail=%s', + use_tls, + tls_cacertfile, + tls_cacertdir, + tls_req_cert, + ldap.TLS_AVAIL, + ) if debug_level is not None: ldap.set_option(ldap.OPT_DEBUG_LEVEL, debug_level) @@ -602,12 +692,18 @@ def _common_ldap_initialization(url, use_tls=False, tls_cacertfile=None, # The certificate trust options apply for both LDAPS and TLS. if use_tls or using_ldaps: if not ldap.TLS_AVAIL: - raise ValueError(_('Invalid LDAP TLS_AVAIL option: %s. TLS ' - 'not available') % ldap.TLS_AVAIL) + raise ValueError( + _('Invalid LDAP TLS_AVAIL option: %s. TLS ' 'not available') + % ldap.TLS_AVAIL + ) if not tls_cacertfile and not tls_cacertdir: - raise ValueError(_('You need to set tls_cacertfile or ' - 'tls_cacertdir if use_tls is true or ' - 'url uses ldaps: scheme.')) + raise ValueError( + _( + 'You need to set tls_cacertfile or ' + 'tls_cacertdir if use_tls is true or ' + 'url uses ldaps: scheme.' 
+ ) + ) if tls_cacertfile: # NOTE(topol) @@ -617,9 +713,10 @@ def _common_ldap_initialization(url, use_tls=False, tls_cacertfile=None, # works but these values are ignored when setting them on the # connection if not os.path.isfile(tls_cacertfile): - raise IOError(_("tls_cacertfile %s not found " - "or is not a file") % - tls_cacertfile) + raise IOError( + _("tls_cacertfile %s not found " "or is not a file") + % tls_cacertfile + ) ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile) elif tls_cacertdir: # NOTE(topol) @@ -629,15 +726,17 @@ def _common_ldap_initialization(url, use_tls=False, tls_cacertfile=None, # works but these values are ignored when setting them on the # connection if not os.path.isdir(tls_cacertdir): - raise IOError(_("tls_cacertdir %s not found " - "or is not a directory") % - tls_cacertdir) + raise IOError( + _("tls_cacertdir %s not found " "or is not a directory") + % tls_cacertdir + ) ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir) if tls_req_cert in list(LDAP_TLS_CERTS.values()): ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert) else: - LOG.debug('LDAP TLS: invalid TLS_REQUIRE_CERT Option=%s', - tls_req_cert) + LOG.debug( + 'LDAP TLS: invalid TLS_REQUIRE_CERT Option=%s', tls_req_cert + ) class AsynchronousMessage(object): @@ -678,11 +777,13 @@ def use_conn_pool(func): This adds connection object to decorated API as next argument after self. 
""" + def wrapper(self, *args, **kwargs): # assert isinstance(self, PooledLDAPHandler) with self._get_pool_connection() as conn: self._apply_options(conn) return func(self, conn, *args, **kwargs) + return wrapper @@ -730,20 +831,35 @@ class PooledLDAPHandler(LDAPHandler): self.use_auth_pool = use_auth_pool self.conn_pool = None - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert=ldap.OPT_X_TLS_DEMAND, chase_referrals=None, - debug_level=None, conn_timeout=None, use_pool=None, - pool_size=None, pool_retry_max=None, pool_retry_delay=None, - pool_conn_timeout=None, pool_conn_lifetime=None): + def connect( + self, + url, + page_size=0, + alias_dereferencing=None, + use_tls=False, + tls_cacertfile=None, + tls_cacertdir=None, + tls_req_cert=ldap.OPT_X_TLS_DEMAND, + chase_referrals=None, + debug_level=None, + conn_timeout=None, + use_pool=None, + pool_size=None, + pool_retry_max=None, + pool_retry_delay=None, + pool_conn_timeout=None, + pool_conn_lifetime=None, + ): - _common_ldap_initialization(url=url, - use_tls=use_tls, - tls_cacertfile=tls_cacertfile, - tls_cacertdir=tls_cacertdir, - tls_req_cert=tls_req_cert, - debug_level=debug_level, - timeout=pool_conn_timeout) + _common_ldap_initialization( + url=url, + use_tls=use_tls, + tls_cacertfile=tls_cacertfile, + tls_cacertdir=tls_cacertdir, + tls_req_cert=tls_req_cert, + debug_level=debug_level, + timeout=pool_conn_timeout, + ) self.page_size = page_size @@ -769,7 +885,8 @@ class PooledLDAPHandler(LDAPHandler): timeout=pool_conn_timeout, connector_cls=self.Connector, use_tls=use_tls, - max_lifetime=pool_conn_lifetime) + max_lifetime=pool_conn_lifetime, + ) self.connection_pools[pool_url] = self.conn_pool def set_option(self, option, invalue): @@ -794,8 +911,9 @@ class PooledLDAPHandler(LDAPHandler): def _get_pool_connection(self): return self.conn_pool.connection(self.who, self.cred) - def simple_bind_s(self, who='', cred='', - 
serverctrls=None, clientctrls=None): + def simple_bind_s( + self, who='', cred='', serverctrls=None, clientctrls=None + ): # Not using use_conn_pool decorator here as this API takes cred as # input. self.who = who @@ -814,15 +932,29 @@ class PooledLDAPHandler(LDAPHandler): return conn.add_s(dn, modlist) @use_conn_pool - def search_s(self, conn, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): - return conn.search_s(base, scope, filterstr, attrlist, - attrsonly) + def search_s( + self, + conn, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + ): + return conn.search_s(base, scope, filterstr, attrlist, attrsonly) - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): + def search_ext( + self, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + serverctrls=None, + clientctrls=None, + timeout=-1, + sizelimit=0, + ): """Return an AsynchronousMessage instance, it asynchronous API. The AsynchronousMessage instance can be safely used in a call to @@ -839,17 +971,23 @@ class PooledLDAPHandler(LDAPHandler): conn_ctxt = self._get_pool_connection() conn = conn_ctxt.__enter__() try: - msgid = conn.search_ext(base, scope, - filterstr, attrlist, attrsonly, - serverctrls, clientctrls, - timeout, sizelimit) + msgid = conn.search_ext( + base, + scope, + filterstr, + attrlist, + attrsonly, + serverctrls, + clientctrls, + timeout, + sizelimit, + ) except Exception: conn_ctxt.__exit__(*sys.exc_info()) raise return AsynchronousMessage(msgid, conn, conn_ctxt) - def result3(self, message, all=1, timeout=None, - resp_ctrl_classes=None): + def result3(self, message, all=1, timeout=None, resp_ctrl_classes=None): """Wait for and return the result to an asynchronous message. 
This method returns the result of an operation previously initiated by @@ -930,24 +1068,44 @@ class KeystoneLDAPHandler(LDAPHandler): # Disable the pagination from now on self.page_size = 0 - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert=ldap.OPT_X_TLS_DEMAND, chase_referrals=None, - debug_level=None, conn_timeout=None, use_pool=None, - pool_size=None, pool_retry_max=None, pool_retry_delay=None, - pool_conn_timeout=None, pool_conn_lifetime=None): + def connect( + self, + url, + page_size=0, + alias_dereferencing=None, + use_tls=False, + tls_cacertfile=None, + tls_cacertdir=None, + tls_req_cert=ldap.OPT_X_TLS_DEMAND, + chase_referrals=None, + debug_level=None, + conn_timeout=None, + use_pool=None, + pool_size=None, + pool_retry_max=None, + pool_retry_delay=None, + pool_conn_timeout=None, + pool_conn_lifetime=None, + ): self.page_size = page_size - return self.conn.connect(url, page_size, alias_dereferencing, - use_tls, tls_cacertfile, tls_cacertdir, - tls_req_cert, chase_referrals, - debug_level=debug_level, - conn_timeout=conn_timeout, - use_pool=use_pool, - pool_size=pool_size, - pool_retry_max=pool_retry_max, - pool_retry_delay=pool_retry_delay, - pool_conn_timeout=pool_conn_timeout, - pool_conn_lifetime=pool_conn_lifetime) + return self.conn.connect( + url, + page_size, + alias_dereferencing, + use_tls, + tls_cacertfile, + tls_cacertdir, + tls_req_cert, + chase_referrals, + debug_level=debug_level, + conn_timeout=conn_timeout, + use_pool=use_pool, + pool_size=pool_size, + pool_retry_max=pool_retry_max, + pool_retry_delay=pool_retry_delay, + pool_conn_timeout=pool_conn_timeout, + pool_conn_lifetime=pool_conn_lifetime, + ) def set_option(self, option, invalue): return self.conn.set_option(option, invalue) @@ -955,48 +1113,66 @@ class KeystoneLDAPHandler(LDAPHandler): def get_option(self, option): return self.conn.get_option(option) - def simple_bind_s(self, who='', cred='', - 
serverctrls=None, clientctrls=None): + def simple_bind_s( + self, who='', cred='', serverctrls=None, clientctrls=None + ): LOG.debug('LDAP bind: who=%s', who) - return self.conn.simple_bind_s(who, cred, - serverctrls=serverctrls, - clientctrls=clientctrls) + return self.conn.simple_bind_s( + who, cred, serverctrls=serverctrls, clientctrls=clientctrls + ) def unbind_s(self): LOG.debug('LDAP unbind') return self.conn.unbind_s() def add_s(self, dn, modlist): - ldap_attrs = [(kind, [py2ldap(x) for x in safe_iter(values)]) - for kind, values in modlist] - logging_attrs = [(kind, values - if kind != 'userPassword' - else ['****']) - for kind, values in ldap_attrs] - LOG.debug('LDAP add: dn=%s attrs=%s', - dn, logging_attrs) - ldap_attrs_utf8 = [(kind, [utf8_encode(x) for x in safe_iter(values)]) - for kind, values in ldap_attrs] + ldap_attrs = [ + (kind, [py2ldap(x) for x in safe_iter(values)]) + for kind, values in modlist + ] + logging_attrs = [ + (kind, values if kind != 'userPassword' else ['****']) + for kind, values in ldap_attrs + ] + LOG.debug('LDAP add: dn=%s attrs=%s', dn, logging_attrs) + ldap_attrs_utf8 = [ + (kind, [utf8_encode(x) for x in safe_iter(values)]) + for kind, values in ldap_attrs + ] return self.conn.add_s(dn, ldap_attrs_utf8) - def search_s(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): + def search_s( + self, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + ): # NOTE(morganfainberg): Remove "None" singletons from this list, which # allows us to set mapped attributes to "None" as defaults in config. # Without this filtering, the ldap query would raise a TypeError since # attrlist is expected to be an iterable of strings. 
if attrlist is not None: attrlist = [attr for attr in attrlist if attr is not None] - LOG.debug('LDAP search: base=%s scope=%s filterstr=%s ' - 'attrs=%s attrsonly=%s', - base, scope, filterstr, attrlist, attrsonly) + LOG.debug( + 'LDAP search: base=%s scope=%s filterstr=%s ' + 'attrs=%s attrsonly=%s', + base, + scope, + filterstr, + attrlist, + attrsonly, + ) if self.page_size: - ldap_result = self._paged_search_s(base, scope, - filterstr, attrlist) + ldap_result = self._paged_search_s( + base, scope, filterstr, attrlist + ) else: try: - ldap_result = self.conn.search_s(base, scope, filterstr, - attrlist, attrsonly) + ldap_result = self.conn.search_s( + base, scope, filterstr, attrlist, attrsonly + ) except ldap.SIZELIMIT_EXCEEDED: raise exception.LDAPSizeLimitExceeded() @@ -1004,21 +1180,45 @@ class KeystoneLDAPHandler(LDAPHandler): return py_result - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): + def search_ext( + self, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + serverctrls=None, + clientctrls=None, + timeout=-1, + sizelimit=0, + ): if attrlist is not None: attrlist = [attr for attr in attrlist if attr is not None] - LOG.debug('LDAP search_ext: base=%s scope=%s filterstr=%s ' - 'attrs=%s attrsonly=%s ' - 'serverctrls=%s clientctrls=%s timeout=%s sizelimit=%s', - base, scope, filterstr, attrlist, attrsonly, - serverctrls, clientctrls, timeout, sizelimit) - return self.conn.search_ext(base, scope, - filterstr, attrlist, attrsonly, - serverctrls, clientctrls, - timeout, sizelimit) + LOG.debug( + 'LDAP search_ext: base=%s scope=%s filterstr=%s ' + 'attrs=%s attrsonly=%s ' + 'serverctrls=%s clientctrls=%s timeout=%s sizelimit=%s', + base, + scope, + filterstr, + attrlist, + attrsonly, + serverctrls, + clientctrls, + timeout, + sizelimit, + ) + return self.conn.search_ext( + base, + scope, + filterstr, + attrlist, + 
attrsonly, + serverctrls, + clientctrls, + timeout, + sizelimit, + ) def _paged_search_s(self, base, scope, filterstr, attrlist=None): res = [] @@ -1031,28 +1231,25 @@ class KeystoneLDAPHandler(LDAPHandler): lc = ldap.controls.SimplePagedResultsControl( controlType=ldap.LDAP_CONTROL_PAGE_OID, criticality=True, - controlValue=(self.page_size, '')) + controlValue=(self.page_size, ''), + ) page_ctrl_oid = ldap.LDAP_CONTROL_PAGE_OID else: lc = ldap.controls.libldap.SimplePagedResultsControl( - criticality=True, - size=self.page_size, - cookie='') + criticality=True, size=self.page_size, cookie='' + ) page_ctrl_oid = ldap.controls.SimplePagedResultsControl.controlType - message = self.conn.search_ext(base, - scope, - filterstr, - attrlist, - serverctrls=[lc]) + message = self.conn.search_ext( + base, scope, filterstr, attrlist, serverctrls=[lc] + ) # Endless loop request pages on ldap server until it has no data while True: # Request to the ldap server a page with 'page_size' entries rtype, rdata, rmsgid, serverctrls = self.conn.result3(message) # Receive the data res.extend(rdata) - pctrls = [c for c in serverctrls - if c.controlType == page_ctrl_oid] + pctrls = [c for c in serverctrls if c.controlType == page_ctrl_oid] if pctrls: # LDAP server supports pagination if use_old_paging_api: @@ -1064,29 +1261,36 @@ class KeystoneLDAPHandler(LDAPHandler): if cookie: # There is more data still on the server # so we request another page - message = self.conn.search_ext(base, - scope, - filterstr, - attrlist, - serverctrls=[lc]) + message = self.conn.search_ext( + base, scope, filterstr, attrlist, serverctrls=[lc] + ) else: # Exit condition no more data on server break else: - LOG.warning('LDAP Server does not support paging. ' - 'Disable paging in keystone.conf to ' - 'avoid this message.') + LOG.warning( + 'LDAP Server does not support paging. ' + 'Disable paging in keystone.conf to ' + 'avoid this message.' 
+ ) self._disable_paging() break return res - def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None, - resp_ctrl_classes=None): + def result3( + self, msgid=ldap.RES_ANY, all=1, timeout=None, resp_ctrl_classes=None + ): ldap_result = self.conn.result3(msgid, all, timeout, resp_ctrl_classes) - LOG.debug('LDAP result3: msgid=%s all=%s timeout=%s ' - 'resp_ctrl_classes=%s ldap_result=%s', - msgid, all, timeout, resp_ctrl_classes, ldap_result) + LOG.debug( + 'LDAP result3: msgid=%s all=%s timeout=%s ' + 'resp_ctrl_classes=%s ldap_result=%s', + msgid, + all, + timeout, + resp_ctrl_classes, + ldap_result, + ) # ldap_result returned from result3 is a tuple of # (rtype, rdata, rmsgid, serverctrls). We don't need use of these, @@ -1097,20 +1301,36 @@ class KeystoneLDAPHandler(LDAPHandler): def modify_s(self, dn, modlist): ldap_modlist = [ - (op, kind, (None if values is None - else [py2ldap(x) for x in safe_iter(values)])) - for op, kind, values in modlist] + ( + op, + kind, + ( + None + if values is None + else [py2ldap(x) for x in safe_iter(values)] + ), + ) + for op, kind, values in modlist + ] - logging_modlist = [(op, kind, (values if kind != 'userPassword' - else ['****'])) - for op, kind, values in ldap_modlist] - LOG.debug('LDAP modify: dn=%s modlist=%s', - dn, logging_modlist) + logging_modlist = [ + (op, kind, (values if kind != 'userPassword' else ['****'])) + for op, kind, values in ldap_modlist + ] + LOG.debug('LDAP modify: dn=%s modlist=%s', dn, logging_modlist) ldap_modlist_utf8 = [ - (op, kind, (None if values is None - else [utf8_encode(x) for x in safe_iter(values)])) - for op, kind, values in ldap_modlist] + ( + op, + kind, + ( + None + if values is None + else [utf8_encode(x) for x in safe_iter(values)] + ), + ) + for op, kind, values in ldap_modlist + ] return self.conn.modify_s(dn, ldap_modlist_utf8) def __exit__(self, exc_type, exc_val, exc_tb): @@ -1202,30 +1422,35 @@ class BaseLdap(object): self.auth_pool_conn_lifetime = 
conf.ldap.auth_pool_connection_lifetime if self.options_name is not None: - self.tree_dn = ( - getattr(conf.ldap, '%s_tree_dn' % self.options_name) - or '%s,%s' % (self.DEFAULT_OU, conf.ldap.suffix)) + self.tree_dn = getattr( + conf.ldap, '%s_tree_dn' % self.options_name + ) or '%s,%s' % (self.DEFAULT_OU, conf.ldap.suffix) idatt = '%s_id_attribute' % self.options_name self.id_attr = getattr(conf.ldap, idatt) or self.DEFAULT_ID_ATTR objclass = '%s_objectclass' % self.options_name - self.object_class = (getattr(conf.ldap, objclass) - or self.DEFAULT_OBJECTCLASS) + self.object_class = ( + getattr(conf.ldap, objclass) or self.DEFAULT_OBJECTCLASS + ) for k, v in self.attribute_options_names.items(): v = '%s_%s_attribute' % (self.options_name, v) self.attribute_mapping[k] = getattr(conf.ldap, v) - attr_mapping_opt = ('%s_additional_attribute_mapping' % - self.options_name) - attr_mapping = (getattr(conf.ldap, attr_mapping_opt) - or self.DEFAULT_EXTRA_ATTR_MAPPING) + attr_mapping_opt = ( + '%s_additional_attribute_mapping' % self.options_name + ) + attr_mapping = ( + getattr(conf.ldap, attr_mapping_opt) + or self.DEFAULT_EXTRA_ATTR_MAPPING + ) self.extra_attr_mapping = self._parse_extra_attrs(attr_mapping) ldap_filter = '%s_filter' % self.options_name - self.ldap_filter = getattr(conf.ldap, - ldap_filter) or self.DEFAULT_FILTER + self.ldap_filter = ( + getattr(conf.ldap, ldap_filter) or self.DEFAULT_FILTER + ) member_attribute = '%s_member_attribute' % self.options_name self.member_attribute = getattr(conf.ldap, member_attribute, None) @@ -1254,7 +1479,8 @@ class BaseLdap(object): LOG.warning( 'Invalid additional attribute mapping: "%s". 
' 'Format must be :', - item) + item, + ) continue mapping[ldap_attr] = attr_map return mapping @@ -1271,8 +1497,9 @@ class BaseLdap(object): pool_size = self.auth_pool_size pool_conn_lifetime = self.auth_pool_conn_lifetime - conn = _get_connection(self.LDAP_URL, use_pool, - use_auth_pool=end_user_auth) + conn = _get_connection( + self.LDAP_URL, use_pool, use_auth_pool=end_user_auth + ) conn = KeystoneLDAPHandler(conn=conn) @@ -1280,22 +1507,24 @@ class BaseLdap(object): # exist. If that is the case, the bind attempt will # fail with a server down exception. try: - conn.connect(self.LDAP_URL, - page_size=self.page_size, - alias_dereferencing=self.alias_dereferencing, - use_tls=self.use_tls, - tls_cacertfile=self.tls_cacertfile, - tls_cacertdir=self.tls_cacertdir, - tls_req_cert=self.tls_req_cert, - chase_referrals=self.chase_referrals, - debug_level=self.debug_level, - conn_timeout=self.conn_timeout, - use_pool=use_pool, - pool_size=pool_size, - pool_retry_max=self.pool_retry_max, - pool_retry_delay=self.pool_retry_delay, - pool_conn_timeout=self.pool_conn_timeout, - pool_conn_lifetime=pool_conn_lifetime) + conn.connect( + self.LDAP_URL, + page_size=self.page_size, + alias_dereferencing=self.alias_dereferencing, + use_tls=self.use_tls, + tls_cacertfile=self.tls_cacertfile, + tls_cacertdir=self.tls_cacertdir, + tls_req_cert=self.tls_req_cert, + chase_referrals=self.chase_referrals, + debug_level=self.debug_level, + conn_timeout=self.conn_timeout, + use_pool=use_pool, + pool_size=pool_size, + pool_retry_max=self.pool_retry_max, + pool_retry_delay=self.pool_retry_delay, + pool_conn_timeout=self.pool_conn_timeout, + pool_conn_lifetime=pool_conn_lifetime, + ) if user is None: user = self.LDAP_USER @@ -1314,27 +1543,30 @@ class BaseLdap(object): except ldap.INVALID_CREDENTIALS: raise exception.LDAPInvalidCredentialsError() except ldap.SERVER_DOWN: - raise exception.LDAPServerConnectionError( - url=self.LDAP_URL) + raise 
exception.LDAPServerConnectionError(url=self.LDAP_URL) def _id_to_dn_string(self, object_id): - return u'%s=%s,%s' % (self.id_attr, - ldap.dn.escape_dn_chars( - str(object_id)), - self.tree_dn) + return u'%s=%s,%s' % ( + self.id_attr, + ldap.dn.escape_dn_chars(str(object_id)), + self.tree_dn, + ) def _id_to_dn(self, object_id): if self.LDAP_SCOPE == ldap.SCOPE_ONELEVEL: return self._id_to_dn_string(object_id) with self.get_connection() as conn: search_result = conn.search_s( - self.tree_dn, self.LDAP_SCOPE, - u'(&(%(id_attr)s=%(id)s)(objectclass=%(objclass)s))' % - {'id_attr': self.id_attr, - 'id': ldap.filter.escape_filter_chars( - str(object_id)), - 'objclass': self.object_class}, - attrlist=DN_ONLY) + self.tree_dn, + self.LDAP_SCOPE, + u'(&(%(id_attr)s=%(id)s)(objectclass=%(objclass)s))' + % { + 'id_attr': self.id_attr, + 'id': ldap.filter.escape_filter_chars(str(object_id)), + 'objclass': self.object_class, + }, + attrlist=DN_ONLY, + ) if search_result: dn, attrs = search_result[0] return dn @@ -1356,21 +1588,25 @@ class BaseLdap(object): try: id_list = search_result[0][1][self.id_attr] except KeyError: - message = ('ID attribute %(id_attr)s not found in LDAP ' - 'object %(dn)s.') % ({'id_attr': self.id_attr, - 'dn': search_result}) + message = ( + 'ID attribute %(id_attr)s not found in LDAP ' + 'object %(dn)s.' + ) % ({'id_attr': self.id_attr, 'dn': search_result}) LOG.warning(message) raise exception.NotFound(message=message) if len(id_list) > 1: - message = ('In order to keep backward compatibility, in ' - 'the case of multivalued ids, we are ' - 'returning the first id %(id_attr)s in the ' - 'DN.') % ({'id_attr': id_list[0]}) + message = ( + 'In order to keep backward compatibility, in ' + 'the case of multivalued ids, we are ' + 'returning the first id %(id_attr)s in the ' + 'DN.' 
+ ) % ({'id_attr': id_list[0]}) LOG.warning(message) return id_list[0] else: message = _('DN attribute %(dn)s not found in LDAP') % ( - {'dn': dn}) + {'dn': dn} + ) raise exception.NotFound(message=message) def _ldap_res_to_model(self, res): @@ -1383,9 +1619,9 @@ class BaseLdap(object): id_attrs = lower_res.get(self.id_attr.lower()) if not id_attrs: - message = _('ID attribute %(id_attr)s not found in LDAP ' - 'object %(dn)s') % ({'id_attr': self.id_attr, - 'dn': res[0]}) + message = _( + 'ID attribute %(id_attr)s not found in LDAP ' 'object %(dn)s' + ) % ({'id_attr': self.id_attr, 'dn': res[0]}) raise exception.NotFound(message=message) if len(id_attrs) > 1: # FIXME(gyee): if this is a multi-value attribute and it has @@ -1393,10 +1629,11 @@ class BaseLdap(object): # logic here so it does not potentially break existing # deployments. We need to fix our read-write LDAP logic so # it does not get the ID from DN. - message = ('ID attribute %(id_attr)s for LDAP object %(dn)s ' - 'has multiple values and therefore cannot be used ' - 'as an ID. Will get the ID from DN instead') % ( - {'id_attr': self.id_attr, 'dn': res[0]}) + message = ( + 'ID attribute %(id_attr)s for LDAP object %(dn)s ' + 'has multiple values and therefore cannot be used ' + 'as an ID. Will get the ID from DN instead' + ) % ({'id_attr': self.id_attr, 'dn': res[0]}) LOG.warning(message) id_val = self._dn_to_id(res[0]) else: @@ -1433,8 +1670,11 @@ class BaseLdap(object): try: value = value.decode('utf-8') except UnicodeDecodeError: - LOG.error("Error decoding value %r (object id %r).", - value, res[0]) + LOG.error( + "Error decoding value %r (object id %r).", + value, + res[0], + ) raise obj[k] = value @@ -1448,9 +1688,10 @@ class BaseLdap(object): # Didn't find it so it's unique, good. 
pass else: - raise exception.Conflict(type=self.options_name, - details=_('Duplicate name, %s.') % - values['name']) + raise exception.Conflict( + type=self.options_name, + details=_('Duplicate name, %s.') % values['name'], + ) if values.get('id') is not None: try: @@ -1459,9 +1700,10 @@ class BaseLdap(object): # Didn't find it, so it's unique, good. pass else: - raise exception.Conflict(type=self.options_name, - details=_('Duplicate ID, %s.') % - values['id']) + raise exception.Conflict( + type=self.options_name, + details=_('Duplicate ID, %s.') % values['id'], + ) def create(self, values): self.affirm_unique(values) @@ -1478,9 +1720,11 @@ class BaseLdap(object): attr_type = self.attribute_mapping.get(k, k) if attr_type is not None: attrs.append((attr_type, [v])) - extra_attrs = [attr for attr, name - in self.extra_attr_mapping.items() - if name == k] + extra_attrs = [ + attr + for attr, name in self.extra_attr_mapping.items() + if name == k + ] for attr in extra_attrs: attrs.append((attr, [v])) @@ -1499,12 +1743,14 @@ class BaseLdap(object): # To ensure that ldap attribute value is not empty in ldap config. 
if not attr: - attr_name = ('%s_%s_attribute' % - (self.options_name, - self.attribute_options_names[ldap_attr_name])) - raise ValueError('"%(attr)s" is not a valid value for' - ' "%(attr_name)s"' % {'attr': attr, - 'attr_name': attr_name}) + attr_name = '%s_%s_attribute' % ( + self.options_name, + self.attribute_options_names[ldap_attr_name], + ) + raise ValueError( + '"%(attr)s" is not a valid value for' + ' "%(attr_name)s"' % {'attr': attr, 'attr_name': attr_name} + ) # consider attr = "cn" and # ldap_result = [{'uid': ['fake_id1']}, , 'cN': ["name"]}] @@ -1536,23 +1782,31 @@ class BaseLdap(object): return result def _ldap_get(self, object_id, ldap_filter=None): - query = (u'(&(%(id_attr)s=%(id)s)' - u'%(filter)s' - u'(objectClass=%(object_class)s))' - % {'id_attr': self.id_attr, - 'id': ldap.filter.escape_filter_chars( - str(object_id)), - 'filter': (ldap_filter or self.ldap_filter or ''), - 'object_class': self.object_class}) + query = ( + u'(&(%(id_attr)s=%(id)s)' + u'%(filter)s' + u'(objectClass=%(object_class)s))' + % { + 'id_attr': self.id_attr, + 'id': ldap.filter.escape_filter_chars(str(object_id)), + 'filter': (ldap_filter or self.ldap_filter or ''), + 'object_class': self.object_class, + } + ) with self.get_connection() as conn: try: - attrs = list(set(([self.id_attr] + - list(self.attribute_mapping.values()) + - list(self.extra_attr_mapping.keys())))) - res = conn.search_s(self.tree_dn, - self.LDAP_SCOPE, - query, - attrs) + attrs = list( + set( + ( + [self.id_attr] + + list(self.attribute_mapping.values()) + + list(self.extra_attr_mapping.keys()) + ) + ) + ) + res = conn.search_s( + self.tree_dn, self.LDAP_SCOPE, query, attrs + ) except ldap.NO_SUCH_OBJECT: return None @@ -1572,11 +1826,11 @@ class BaseLdap(object): with self.get_connection() as conn: try: control = ldap.controls.libldap.SimplePagedResultsControl( - criticality=True, - size=sizelimit, - cookie='') - msgid = conn.search_ext(base, scope, filterstr, attrlist, - serverctrls=[control]) + 
criticality=True, size=sizelimit, cookie='' + ) + msgid = conn.search_ext( + base, scope, filterstr, attrlist, serverctrls=[control] + ) rdata = conn.result3(msgid) return rdata except ldap.NO_SUCH_OBJECT: @@ -1587,25 +1841,29 @@ class BaseLdap(object): query = u'(&%s(objectClass=%s)(%s=*))' % ( ldap_filter or self.ldap_filter or '', self.object_class, - self.id_attr) + self.id_attr, + ) sizelimit = 0 - attrs = list(set(([self.id_attr] + - list(self.attribute_mapping.values()) + - list(self.extra_attr_mapping.keys())))) + attrs = list( + set( + ( + [self.id_attr] + + list(self.attribute_mapping.values()) + + list(self.extra_attr_mapping.keys()) + ) + ) + ) if hints.limit: sizelimit = hints.limit['limit'] - res = self._ldap_get_limited(self.tree_dn, - self.LDAP_SCOPE, - query, - attrs, - sizelimit) + res = self._ldap_get_limited( + self.tree_dn, self.LDAP_SCOPE, query, attrs, sizelimit + ) else: with self.get_connection() as conn: try: - res = conn.search_s(self.tree_dn, - self.LDAP_SCOPE, - query, - attrs) + res = conn.search_s( + self.tree_dn, self.LDAP_SCOPE, query, attrs + ) except ldap.NO_SUCH_OBJECT: return [] # TODO(prashkre): add functional testing for missing name attribute @@ -1617,8 +1875,9 @@ class BaseLdap(object): # compared to explicit filtering by 'name' through ldap result. 
return self._filter_ldap_result_by_attr(res, 'name') - def _ldap_get_list(self, search_base, scope, query_params=None, - attrlist=None): + def _ldap_get_list( + self, search_base, scope, query_params=None, attrlist=None + ): query = u'(objectClass=%s)' % self.object_class if query_params: @@ -1626,9 +1885,10 @@ class BaseLdap(object): val_esc = ldap.filter.escape_filter_chars(value) return '(%s=%s)' % (attrname, val_esc) - query = (u'(&%s%s)' % - (query, ''.join([calc_filter(k, v) for k, v in - query_params.items()]))) + query = u'(&%s%s)' % ( + query, + ''.join([calc_filter(k, v) for k, v in query_params.items()]), + ) with self.get_connection() as conn: return conn.search_s(search_base, scope, query, attrlist) @@ -1640,9 +1900,10 @@ class BaseLdap(object): return self._ldap_res_to_model(res) def get_by_name(self, name, ldap_filter=None): - query = (u'(%s=%s)' % (self.attribute_mapping['name'], - ldap.filter.escape_filter_chars( - str(name)))) + query = u'(%s=%s)' % ( + self.attribute_mapping['name'], + ldap.filter.escape_filter_chars(str(name)), + ) res = self.get_all(query) try: return res[0] @@ -1651,8 +1912,10 @@ class BaseLdap(object): def get_all(self, ldap_filter=None, hints=None): hints = hints or driver_hints.Hints() - return [self._ldap_res_to_model(x) - for x in self._ldap_get_all(hints, ldap_filter)] + return [ + self._ldap_res_to_model(x) + for x in self._ldap_get_all(hints, ldap_filter) + ] def update(self, object_id, values, old_obj=None): if old_obj is None: @@ -1668,8 +1931,10 @@ class BaseLdap(object): # Handle 'enabled' specially since can't disable if ignored. if k == 'enabled' and (not v): - action = _("Disabling an entity where the 'enable' " - "attribute is ignored by configuration.") + action = _( + "Disabling an entity where the 'enable' " + "attribute is ignored by configuration." 
+ ) raise exception.ForbiddenAction(action=action) continue @@ -1679,15 +1944,21 @@ class BaseLdap(object): continue if k in self.immutable_attrs: - msg = (_("Cannot change %(option_name)s %(attr)s") % - {'option_name': self.options_name, 'attr': k}) + msg = _("Cannot change %(option_name)s %(attr)s") % { + 'option_name': self.options_name, + 'attr': k, + } raise exception.ValidationError(msg) if v is None: if old_obj.get(k) is not None: - modlist.append((ldap.MOD_DELETE, - self.attribute_mapping.get(k, k), - None)) + modlist.append( + ( + ldap.MOD_DELETE, + self.attribute_mapping.get(k, k), + None, + ) + ) continue current_value = old_obj.get(k) @@ -1722,11 +1993,14 @@ class BaseLdap(object): mod = (ldap.MOD_ADD, self.member_attribute, member_dn) conn.modify_s(member_list_dn, [mod]) except ldap.TYPE_OR_VALUE_EXISTS: - raise exception.Conflict(_('Member %(member)s ' - 'is already a member' - ' of group %(group)s') % { - 'member': member_dn, - 'group': member_list_dn}) + raise exception.Conflict( + _( + 'Member %(member)s ' + 'is already a member' + ' of group %(group)s' + ) + % {'member': member_dn, 'group': member_list_dn} + ) except ldap.NO_SUCH_OBJECT: raise self._not_found(member_list_dn) @@ -1743,6 +2017,7 @@ class BaseLdap(object): :returns query: LDAP query, updated with any filters satisfied """ + def build_filter(filter_): """Build a filter for the query. @@ -1778,17 +2053,25 @@ class BaseLdap(object): # booleans (this is related to bug #1411478). 
if filter_['comparator'] == 'equals': - query_term = (u'(%(attr)s=%(val)s)' - % {'attr': ldap_attr, 'val': val_esc}) + query_term = u'(%(attr)s=%(val)s)' % { + 'attr': ldap_attr, + 'val': val_esc, + } elif filter_['comparator'] == 'contains': - query_term = (u'(%(attr)s=*%(val)s*)' - % {'attr': ldap_attr, 'val': val_esc}) + query_term = u'(%(attr)s=*%(val)s*)' % { + 'attr': ldap_attr, + 'val': val_esc, + } elif filter_['comparator'] == 'startswith': - query_term = (u'(%(attr)s=%(val)s*)' - % {'attr': ldap_attr, 'val': val_esc}) + query_term = u'(%(attr)s=%(val)s*)' % { + 'attr': ldap_attr, + 'val': val_esc, + } elif filter_['comparator'] == 'endswith': - query_term = (u'(%(attr)s=*%(val)s)' - % {'attr': ldap_attr, 'val': val_esc}) + query_term = u'(%(attr)s=*%(val)s)' % { + 'attr': ldap_attr, + 'val': val_esc, + } else: # It's a filter we don't understand, so let the caller # work out if they need to do something with it. @@ -1854,8 +2137,9 @@ class EnabledEmuMixIn(BaseLdap): enabled_emulation_dn = '%s_enabled_emulation_dn' % self.options_name self.enabled_emulation_dn = getattr(conf.ldap, enabled_emulation_dn) - use_group_config = ('%s_enabled_emulation_use_group_config' % - self.options_name) + use_group_config = ( + '%s_enabled_emulation_use_group_config' % self.options_name + ) self.use_group_config = getattr(conf.ldap, use_group_config) if not self.use_group_config: @@ -1877,8 +2161,7 @@ class EnabledEmuMixIn(BaseLdap): # Extract the attribute name and value from the configured DN. 
naming_dn = ldap.dn.str2dn(self.enabled_emulation_dn) naming_rdn = naming_dn[0][0] - naming_attr = (naming_rdn[0], - naming_rdn[1]) + naming_attr = (naming_rdn[0], naming_rdn[1]) self.enabled_emulation_naming_attr = naming_attr def _id_to_member_attribute_value(self, object_id): @@ -1892,12 +2175,17 @@ class EnabledEmuMixIn(BaseLdap): return self._is_member_enabled(member_attr_val, conn) def _is_member_enabled(self, member_attr_val, conn): - query = '(%s=%s)' % (self.member_attribute, - ldap.filter.escape_filter_chars(member_attr_val)) + query = '(%s=%s)' % ( + self.member_attribute, + ldap.filter.escape_filter_chars(member_attr_val), + ) try: - enabled_value = conn.search_s(self.enabled_emulation_dn, - ldap.SCOPE_BASE, - query, attrlist=DN_ONLY) + enabled_value = conn.search_s( + self.enabled_emulation_dn, + ldap.SCOPE_BASE, + query, + attrlist=DN_ONLY, + ) except ldap.NO_SUCH_OBJECT: return False else: @@ -1907,23 +2195,22 @@ class EnabledEmuMixIn(BaseLdap): member_attr_val = self._id_to_member_attribute_value(object_id) with self.get_connection() as conn: if not self._is_member_enabled(member_attr_val, conn): - modlist = [(ldap.MOD_ADD, - self.member_attribute, - [member_attr_val])] + modlist = [ + (ldap.MOD_ADD, self.member_attribute, [member_attr_val]) + ] try: conn.modify_s(self.enabled_emulation_dn, modlist) except ldap.NO_SUCH_OBJECT: - attr_list = [('objectClass', [self.group_objectclass]), - (self.member_attribute, - [member_attr_val]), - self.enabled_emulation_naming_attr] + attr_list = [ + ('objectClass', [self.group_objectclass]), + (self.member_attribute, [member_attr_val]), + self.enabled_emulation_naming_attr, + ] conn.add_s(self.enabled_emulation_dn, attr_list) def _remove_enabled(self, object_id): member_attr_val = self._id_to_member_attribute_value(object_id) - modlist = [(ldap.MOD_DELETE, - self.member_attribute, - [member_attr_val])] + modlist = [(ldap.MOD_DELETE, self.member_attribute, [member_attr_val])] with self.get_connection() as conn: 
try: conn.modify_s(self.enabled_emulation_dn, modlist) @@ -1946,8 +2233,10 @@ class EnabledEmuMixIn(BaseLdap): def get(self, object_id, ldap_filter=None): with self.get_connection() as conn: ref = super(EnabledEmuMixIn, self).get(object_id, ldap_filter) - if ('enabled' not in self.attribute_ignore and - self.enabled_emulation): + if ( + 'enabled' not in self.attribute_ignore + and self.enabled_emulation + ): ref['enabled'] = self._is_id_enabled(object_id, conn) return ref @@ -1955,13 +2244,16 @@ class EnabledEmuMixIn(BaseLdap): hints = hints or driver_hints.Hints() if 'enabled' not in self.attribute_ignore and self.enabled_emulation: # had to copy BaseLdap.get_all here to ldap_filter by DN - obj_list = [self._ldap_res_to_model(x) - for x in self._ldap_get_all(hints, ldap_filter) - if x[0] != self.enabled_emulation_dn] + obj_list = [ + self._ldap_res_to_model(x) + for x in self._ldap_get_all(hints, ldap_filter) + if x[0] != self.enabled_emulation_dn + ] with self.get_connection() as conn: for obj_ref in obj_list: obj_ref['enabled'] = self._is_id_enabled( - obj_ref['id'], conn) + obj_ref['id'], conn + ) return obj_list else: return super(EnabledEmuMixIn, self).get_all(ldap_filter, hints) @@ -1980,4 +2272,5 @@ class EnabledEmuMixIn(BaseLdap): return ref else: return super(EnabledEmuMixIn, self).update( - object_id, values, old_obj) + object_id, values, old_obj + ) diff --git a/keystone/identity/backends/ldap/core.py b/keystone/identity/backends/ldap/core.py index 07f037a408..a054bc2b69 100644 --- a/keystone/identity/backends/ldap/core.py +++ b/keystone/identity/backends/ldap/core.py @@ -28,9 +28,11 @@ from keystone.identity.backends.ldap import models CONF = keystone.conf.CONF LOG = log.getLogger(__name__) -_DEPRECATION_MSG = ('%s for the LDAP identity backend has been deprecated in ' - 'the Mitaka release in favor of read-only identity LDAP ' - 'access. 
It will be removed in the "O" release.') +_DEPRECATION_MSG = ( + '%s for the LDAP identity backend has been deprecated in ' + 'the Mitaka release in favor of read-only identity LDAP ' + 'access. It will be removed in the "O" release.' +) READ_ONLY_LDAP_ERROR_MESSAGE = _("LDAP does not support write operations") @@ -64,8 +66,9 @@ class Identity(base.IdentityDriverBase): raise AssertionError(_('Invalid user / password')) conn = None try: - conn = self.user.get_connection(user_ref['dn'], - password, end_user_auth=True) + conn = self.user.get_connection( + user_ref['dn'], password, end_user_auth=True + ) if not conn: raise AssertionError(_('Invalid user / password')) except Exception: @@ -128,9 +131,11 @@ class Identity(base.IdentityDriverBase): try: users.append(self.user.get_filtered(user_id)) except exception.UserNotFound: - msg = ('Group member `%(user_id)s` for group `%(group_id)s`' - ' not found in the directory. The user should be' - ' removed from the group. The user will be ignored.') + msg = ( + 'Group member `%(user_id)s` for group `%(group_id)s`' + ' not found in the directory. The user should be' + ' removed from the group. The user will be ignored.' 
+ ) LOG.debug(msg, dict(user_id=user_id, group_id=group_id)) return users @@ -144,10 +149,10 @@ class Identity(base.IdentityDriverBase): if group_member_id == user_id: break else: - raise exception.NotFound(_("User '%(user_id)s' not found in" - " group '%(group_id)s'") % - {'user_id': user_id, - 'group_id': group_id}) + raise exception.NotFound( + _("User '%(user_id)s' not found in" " group '%(group_id)s'") + % {'user_id': user_id, 'group_id': group_id} + ) # Unsupported methods def _disallow_write(self): @@ -238,12 +243,14 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap): DEFAULT_OBJECTCLASS = 'inetOrgPerson' NotFound = exception.UserNotFound options_name = 'user' - attribute_options_names = {'password': 'pass', - 'email': 'mail', - 'name': 'name', - 'description': 'description', - 'enabled': 'enabled', - 'default_project_id': 'default_project_id'} + attribute_options_names = { + 'password': 'pass', + 'email': 'mail', + 'name': 'name', + 'description': 'description', + 'enabled': 'enabled', + 'default_project_id': 'default_project_id', + } immutable_attrs = ['id'] model = models.User @@ -259,8 +266,7 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap): obj = super(UserApi, self)._ldap_res_to_model(res) if self.enabled_mask != 0: enabled = int(obj.get('enabled', self.enabled_default)) - obj['enabled'] = ((enabled & self.enabled_mask) != - self.enabled_mask) + obj['enabled'] = (enabled & self.enabled_mask) != self.enabled_mask elif self.enabled_invert and not self.enabled_emulation: # This could be a bool or a string. If it's a string, # we need to convert it so we can invert it properly. 
@@ -278,8 +284,9 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap): def mask_enabled_attribute(self, values): value = values['enabled'] values.setdefault('enabled_nomask', int(self.enabled_default)) - if value != ((values['enabled_nomask'] & self.enabled_mask) != - self.enabled_mask): + if value != ( + (values['enabled_nomask'] & self.enabled_mask) != self.enabled_mask + ): values['enabled_nomask'] ^= self.enabled_mask values['enabled'] = values['enabled_nomask'] del values['enabled_nomask'] @@ -297,8 +304,9 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap): else: values['enabled'] = self.enabled_default values = super(UserApi, self).create(values) - if self.enabled_mask or (self.enabled_invert and - not self.enabled_emulation): + if self.enabled_mask or ( + self.enabled_invert and not self.enabled_emulation + ): values['enabled'] = orig_enabled values['options'] = {} # options always empty return values @@ -316,16 +324,18 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap): raise self.NotFound(user_id=user_id) def get_all(self, ldap_filter=None, hints=None): - objs = super(UserApi, self).get_all(ldap_filter=ldap_filter, - hints=hints) + objs = super(UserApi, self).get_all( + ldap_filter=ldap_filter, hints=hints + ) for obj in objs: obj['options'] = {} # options always empty return objs def get_all_filtered(self, hints): query = self.filter_query(hints, self.ldap_filter) - return [self.filter_attributes(user) - for user in self.get_all(query, hints)] + return [ + self.filter_attributes(user) for user in self.get_all(query, hints) + ] def filter_attributes(self, user): return base.filter_user(common_ldap.filter_entity(user)) @@ -360,8 +370,7 @@ class GroupApi(common_ldap.BaseLdap): DEFAULT_MEMBER_ATTRIBUTE = 'member' NotFound = exception.GroupNotFound options_name = 'group' - attribute_options_names = {'description': 'desc', - 'name': 'name'} + attribute_options_names = {'description': 'desc', 'name': 'name'} 
immutable_attrs = ['name'] model = models.Group @@ -373,8 +382,9 @@ class GroupApi(common_ldap.BaseLdap): def __init__(self, conf): super(GroupApi, self).__init__(conf) self.group_ad_nesting = conf.ldap.group_ad_nesting - self.member_attribute = (conf.ldap.group_member_attribute - or self.DEFAULT_MEMBER_ATTRIBUTE) + self.member_attribute = ( + conf.ldap.group_member_attribute or self.DEFAULT_MEMBER_ATTRIBUTE + ) def create(self, values): data = values.copy() @@ -394,9 +404,10 @@ class GroupApi(common_ldap.BaseLdap): try: super(GroupApi, self).add_member(user_dn, group_dn) except exception.Conflict: - raise exception.Conflict(_( - 'User %(user_id)s is already a member of group %(group_id)s') % - {'user_id': user_id, 'group_id': group_id}) + raise exception.Conflict( + _('User %(user_id)s is already a member of group %(group_id)s') + % {'user_id': user_id, 'group_id': group_id} + ) def list_user_groups(self, user_dn): """Return a list of groups for which the user is a member.""" @@ -405,10 +416,10 @@ class GroupApi(common_ldap.BaseLdap): query = '(%s:%s:=%s)' % ( self.member_attribute, LDAP_MATCHING_RULE_IN_CHAIN, - user_dn_esc) + user_dn_esc, + ) else: - query = '(%s=%s)' % (self.member_attribute, - user_dn_esc) + query = '(%s=%s)' % (self.member_attribute, user_dn_esc) return self.get_all(query) def list_user_groups_filtered(self, user_dn, hints): @@ -420,10 +431,10 @@ class GroupApi(common_ldap.BaseLdap): # member_of elsewhere, so they are not the same. query = '(member:%s:=%s)' % ( LDAP_MATCHING_RULE_IN_CHAIN, - user_dn_esc) + user_dn_esc, + ) else: - query = '(%s=%s)' % (self.member_attribute, - user_dn_esc) + query = '(%s=%s)' % (self.member_attribute, user_dn_esc) return self.get_all_filtered(hints, query) def list_group_users(self, group_id): @@ -437,14 +448,17 @@ class GroupApi(common_ldap.BaseLdap): # coding to SCOPE_SUBTREE to get through the unit tests. # However, it is also probably more correct. 
attrs = self._ldap_get_list( - self.tree_dn, self.LDAP_SCOPE, + self.tree_dn, + self.LDAP_SCOPE, query_params={ - "member:%s:" % LDAP_MATCHING_RULE_IN_CHAIN: - group_dn}, - attrlist=[self.member_attribute]) + "member:%s:" % LDAP_MATCHING_RULE_IN_CHAIN: group_dn + }, + attrlist=[self.member_attribute], + ) else: - attrs = self._ldap_get_list(group_dn, ldap.SCOPE_BASE, - attrlist=[self.member_attribute]) + attrs = self._ldap_get_list( + group_dn, ldap.SCOPE_BASE, attrlist=[self.member_attribute] + ) except ldap.NO_SUCH_OBJECT: raise self.NotFound(group_id=group_id) @@ -468,5 +482,7 @@ class GroupApi(common_ldap.BaseLdap): if self.ldap_filter: query = (query or '') + self.ldap_filter query = self.filter_query(hints, query) - return [common_ldap.filter_entity(group) - for group in self.get_all(query, hints)] + return [ + common_ldap.filter_entity(group) + for group in self.get_all(query, hints) + ] diff --git a/keystone/identity/backends/ldap/models.py b/keystone/identity/backends/ldap/models.py index 4b970cb259..0e863dca6f 100644 --- a/keystone/identity/backends/ldap/models.py +++ b/keystone/identity/backends/ldap/models.py @@ -48,8 +48,13 @@ class User(Model): """ required_keys = ('id', 'name', 'domain_id') - optional_keys = ('password', 'description', 'email', 'enabled', - 'default_project_id') + optional_keys = ( + 'password', + 'description', + 'email', + 'enabled', + 'default_project_id', + ) class Group(Model): diff --git a/keystone/identity/backends/resource_options.py b/keystone/identity/backends/resource_options.py index 91ecb43a18..92c8933812 100644 --- a/keystone/identity/backends/resource_options.py +++ b/keystone/identity/backends/resource_options.py @@ -23,9 +23,11 @@ def _mfa_rules_validator_list_of_lists_of_strings_no_duplicates(value): # e.g. [['str1', 'str2'], ['str3', 'str4']] # No sub-list may be empty. Duplication of sub-lists and duplication of # string elements are not permitted. 
- msg = _('Invalid data type, must be a list of lists comprised of strings. ' - 'Sub-lists may not be duplicated. Strings in sub-lists may not be ' - 'duplicated.') + msg = _( + 'Invalid data type, must be a list of lists comprised of strings. ' + 'Sub-lists may not be duplicated. Strings in sub-lists may not be ' + 'duplicated.' + ) if not isinstance(value, list): # Value is not a List, TypeError raise TypeError(msg) @@ -56,63 +58,63 @@ def _mfa_rules_validator_list_of_lists_of_strings_no_duplicates(value): USER_OPTIONS_REGISTRY = resource_options.ResourceOptionRegistry('USER') -IGNORE_CHANGE_PASSWORD_OPT = ( - resource_options.ResourceOption( - option_id='1000', - option_name='ignore_change_password_upon_first_use', - validator=resource_options.boolean_validator, - json_schema_validation=parameter_types.boolean)) -IGNORE_PASSWORD_EXPIRY_OPT = ( - resource_options.ResourceOption( - option_id='1001', - option_name='ignore_password_expiry', - validator=resource_options.boolean_validator, - json_schema_validation=parameter_types.boolean)) -IGNORE_LOCKOUT_ATTEMPT_OPT = ( - resource_options.ResourceOption( - option_id='1002', - option_name='ignore_lockout_failure_attempts', - validator=resource_options.boolean_validator, - json_schema_validation=parameter_types.boolean)) -LOCK_PASSWORD_OPT = ( - resource_options.ResourceOption( - option_id='1003', - option_name='lock_password', - validator=resource_options.boolean_validator, - json_schema_validation=parameter_types.boolean)) -IGNORE_USER_INACTIVITY_OPT = ( - resource_options.ResourceOption( - option_id='1004', - option_name='ignore_user_inactivity', - validator=resource_options.boolean_validator, - json_schema_validation=parameter_types.boolean)) -MFA_RULES_OPT = ( - resource_options.ResourceOption( - option_id='MFAR', - option_name='multi_factor_auth_rules', - validator=_mfa_rules_validator_list_of_lists_of_strings_no_duplicates, - json_schema_validation={ - # List +IGNORE_CHANGE_PASSWORD_OPT = 
resource_options.ResourceOption( + option_id='1000', + option_name='ignore_change_password_upon_first_use', + validator=resource_options.boolean_validator, + json_schema_validation=parameter_types.boolean, +) +IGNORE_PASSWORD_EXPIRY_OPT = resource_options.ResourceOption( + option_id='1001', + option_name='ignore_password_expiry', + validator=resource_options.boolean_validator, + json_schema_validation=parameter_types.boolean, +) +IGNORE_LOCKOUT_ATTEMPT_OPT = resource_options.ResourceOption( + option_id='1002', + option_name='ignore_lockout_failure_attempts', + validator=resource_options.boolean_validator, + json_schema_validation=parameter_types.boolean, +) +LOCK_PASSWORD_OPT = resource_options.ResourceOption( + option_id='1003', + option_name='lock_password', + validator=resource_options.boolean_validator, + json_schema_validation=parameter_types.boolean, +) +IGNORE_USER_INACTIVITY_OPT = resource_options.ResourceOption( + option_id='1004', + option_name='ignore_user_inactivity', + validator=resource_options.boolean_validator, + json_schema_validation=parameter_types.boolean, +) +MFA_RULES_OPT = resource_options.ResourceOption( + option_id='MFAR', + option_name='multi_factor_auth_rules', + validator=_mfa_rules_validator_list_of_lists_of_strings_no_duplicates, + json_schema_validation={ + # List + 'type': 'array', + 'items': { + # Of Lists 'type': 'array', 'items': { - # Of Lists - 'type': 'array', - 'items': { - # Of Strings, each string must be unique, minimum 1 - # element - 'type': 'string', - }, - 'minItems': 1, - 'uniqueItems': True + # Of Strings, each string must be unique, minimum 1 + # element + 'type': 'string', }, - 'uniqueItems': True - })) -MFA_ENABLED_OPT = ( - resource_options.ResourceOption( - option_id='MFAE', - option_name='multi_factor_auth_enabled', - validator=resource_options.boolean_validator, - json_schema_validation=parameter_types.boolean)) + 'minItems': 1, + 'uniqueItems': True, + }, + 'uniqueItems': True, + }, +) +MFA_ENABLED_OPT = 
resource_options.ResourceOption( + option_id='MFAE', + option_name='multi_factor_auth_enabled', + validator=resource_options.boolean_validator, + json_schema_validation=parameter_types.boolean, +) # NOTE(notmorgan): wrap this in a function for testing purposes. diff --git a/keystone/identity/backends/sql.py b/keystone/identity/backends/sql.py index 4fbe4c573e..46b09380d6 100644 --- a/keystone/identity/backends/sql.py +++ b/keystone/identity/backends/sql.py @@ -92,7 +92,8 @@ class Identity(base.IdentityDriverBase): """ ignore_option = user_ref.get_resource_option( - options.IGNORE_LOCKOUT_ATTEMPT_OPT.option_id) + options.IGNORE_LOCKOUT_ATTEMPT_OPT.option_id + ) if ignore_option and ignore_option.option_value is True: return False @@ -137,14 +138,16 @@ class Identity(base.IdentityDriverBase): session.add(user_ref) # Set resource options passed on creation resource_options.resource_options_ref_to_mapper( - user_ref, model.UserOption) + user_ref, model.UserOption + ) return base.filter_user(user_ref.to_dict()) def _change_password_required(self, user): if not CONF.security_compliance.change_password_upon_first_use: return False ignore_option = user.get_resource_option( - options.IGNORE_CHANGE_PASSWORD_OPT.option_id) + options.IGNORE_CHANGE_PASSWORD_OPT.option_id + ) return not (ignore_option and ignore_option.option_value is True) def _create_password_expires_query(self, session, query, hints): @@ -152,16 +155,21 @@ class Identity(base.IdentityDriverBase): if 'password_expires_at' == filter_['name']: # Filter on users who's password expires based on the operator # specified in `filter_['comparator']` - query = query.filter(sqlalchemy.and_( - model.LocalUser.id == model.Password.local_user_id, - filter_['comparator'](model.Password.expires_at, - filter_['value']))) + query = query.filter( + sqlalchemy.and_( + model.LocalUser.id == model.Password.local_user_id, + filter_['comparator']( + model.Password.expires_at, filter_['value'] + ), + ) + ) # Removes the 
`password_expired_at` filters so there are no errors # if the call is filtered further. This is because the # `password_expires_at` value is not stored in the `User` table but # derived from the `Password` table's value `expires_at`. - hints.filters = [x for x in hints.filters if x['name'] != - 'password_expires_at'] + hints.filters = [ + x for x in hints.filters if x['name'] != 'password_expires_at' + ] return query, hints @staticmethod @@ -169,14 +177,15 @@ class Identity(base.IdentityDriverBase): if not hints.limit: return collection - return collection[:hints.limit['limit']] + return collection[: hints.limit['limit']] @driver_hints.truncated def list_users(self, hints): with sql.session_for_read() as session: query = session.query(model.User).outerjoin(model.LocalUser) - query, hints = self._create_password_expires_query(session, query, - hints) + query, hints = self._create_password_expires_query( + session, query, hints + ) user_refs = sql.filter_limit_query(model.User, query, hints) return [base.filter_user(x.to_dict()) for x in user_refs] @@ -196,15 +205,17 @@ class Identity(base.IdentityDriverBase): def get_user(self, user_id): with sql.session_for_read() as session: - return base.filter_user( - self._get_user(session, user_id).to_dict()) + return base.filter_user(self._get_user(session, user_id).to_dict()) def get_user_by_name(self, user_name, domain_id): with sql.session_for_read() as session: query = session.query(model.User).join(model.LocalUser) - query = query.filter(sqlalchemy.and_( - model.LocalUser.name == user_name, - model.LocalUser.domain_id == domain_id)) + query = query.filter( + sqlalchemy.and_( + model.LocalUser.name == user_name, + model.LocalUser.domain_id == domain_id, + ) + ) try: user_ref = query.one() except sql.NotFound: @@ -229,12 +240,16 @@ class Identity(base.IdentityDriverBase): # Move the "_resource_options" attribute over to the real user_ref # so that resource_options.resource_options_ref_to_mapper can # handle the work. 
- setattr(user_ref, '_resource_options', - getattr(new_user, '_resource_options', {})) + setattr( + user_ref, + '_resource_options', + getattr(new_user, '_resource_options', {}), + ) # Move options into the proper attribute mapper construct resource_options.resource_options_ref_to_mapper( - user_ref, model.UserOption) + user_ref, model.UserOption + ) if 'password' in user: user_ref.password = user['password'] @@ -243,8 +258,7 @@ class Identity(base.IdentityDriverBase): user_ref.password_ref.expires_at = expires_now user_ref.extra = new_user.extra - return base.filter_user( - user_ref.to_dict(include_extra_dict=True)) + return base.filter_user(user_ref.to_dict(include_extra_dict=True)) def _validate_password_history(self, password, user_ref): unique_cnt = CONF.security_compliance.unique_last_password_count @@ -252,15 +266,18 @@ class Identity(base.IdentityDriverBase): if unique_cnt > 0: for password_ref in user_ref.local_user.passwords[-unique_cnt:]: if password_hashing.check_password( - password, password_ref.password_hash): + password, password_ref.password_hash + ): raise exception.PasswordHistoryValidationError( - unique_count=unique_cnt) + unique_count=unique_cnt + ) def change_password(self, user_id, new_password): with sql.session_for_write() as session: user_ref = session.get(model.User, user_id) lock_pw_opt = user_ref.get_resource_option( - options.LOCK_PASSWORD_OPT.option_id) + options.LOCK_PASSWORD_OPT.option_id + ) if lock_pw_opt is not None and lock_pw_opt.option_value is True: raise exception.PasswordSelfServiceDisabled() if user_ref.password_ref and user_ref.password_ref.self_service: @@ -271,12 +288,14 @@ class Identity(base.IdentityDriverBase): def _validate_minimum_password_age(self, user_ref): min_age_days = CONF.security_compliance.minimum_password_age - min_age = (user_ref.password_created_at + - datetime.timedelta(days=min_age_days)) + min_age = user_ref.password_created_at + datetime.timedelta( + days=min_age_days + ) if 
datetime.datetime.utcnow() < min_age: days_left = (min_age - datetime.datetime.utcnow()).days raise exception.PasswordAgeValidationError( - min_age_days=min_age_days, days_left=days_left) + min_age_days=min_age_days, days_left=days_left + ) def add_user_to_group(self, user_id, group_id): with sql.session_for_write() as session: @@ -289,8 +308,9 @@ class Identity(base.IdentityDriverBase): if rv: return - session.add(model.UserGroupMembership(user_id=user_id, - group_id=group_id)) + session.add( + model.UserGroupMembership(user_id=user_id, group_id=group_id) + ) def check_user_in_group(self, user_id, group_id): with sql.session_for_read() as session: @@ -307,17 +327,19 @@ class Identity(base.IdentityDriverBase): # Note(knikolla): Check for expiring group membership query = session.query(model.ExpiringUserGroupMembership) query = query.filter( - model.ExpiringUserGroupMembership.user_id == user_id) + model.ExpiringUserGroupMembership.user_id == user_id + ) query = query.filter( - model.ExpiringUserGroupMembership.group_id == group_id) + model.ExpiringUserGroupMembership.group_id == group_id + ) active = [q for q in query.all() if not q.expired] if active: return - raise exception.NotFound(_("User '%(user_id)s' not found in" - " group '%(group_id)s'") % - {'user_id': user_id, - 'group_id': group_id}) + raise exception.NotFound( + _("User '%(user_id)s' not found in" " group '%(group_id)s'") + % {'user_id': user_id, 'group_id': group_id} + ) def remove_user_from_group(self, user_id, group_id): # We don't check if user or group are still valid and let the remove @@ -332,10 +354,13 @@ class Identity(base.IdentityDriverBase): # exceptions. 
self.get_group(group_id) self.get_user(user_id) - raise exception.NotFound(_("User '%(user_id)s' not found in" - " group '%(group_id)s'") % - {'user_id': user_id, - 'group_id': group_id}) + raise exception.NotFound( + _( + "User '%(user_id)s' not found in" + " group '%(group_id)s'" + ) + % {'user_id': user_id, 'group_id': group_id} + ) session.delete(membership_ref) def list_groups_for_user(self, user_id, hints): @@ -355,11 +380,14 @@ class Identity(base.IdentityDriverBase): # so that we can access the expired property. query = session.query(model.ExpiringUserGroupMembership) query = query.filter( - model.ExpiringUserGroupMembership.user_id == user_id) + model.ExpiringUserGroupMembership.user_id == user_id + ) query = sql.filter_limit_query( - model.UserGroupMembership, query, hints) - expiring_groups = [row_to_group_dict(r) for r in query.all() - if not r.expired] + model.UserGroupMembership, query, hints + ) + expiring_groups = [ + row_to_group_dict(r) for r in query.all() if not r.expired + ] # Note(knikolla): I would have loved to be able to merge the two # queries together and use filter_limit_query on the union, but @@ -373,9 +401,11 @@ class Identity(base.IdentityDriverBase): query = session.query(model.User).outerjoin(model.LocalUser) query = query.join(model.UserGroupMembership) query = query.filter( - model.UserGroupMembership.group_id == group_id) - query, hints = self._create_password_expires_query(session, query, - hints) + model.UserGroupMembership.group_id == group_id + ) + query, hints = self._create_password_expires_query( + session, query, hints + ) query = sql.filter_limit_query(model.User, query, hints) return [base.filter_user(u.to_dict()) for u in query] diff --git a/keystone/identity/backends/sql_model.py b/keystone/identity/backends/sql_model.py index 1ef64e068b..3ff2c52b3a 100644 --- a/keystone/identity/backends/sql_model.py +++ b/keystone/identity/backends/sql_model.py @@ -31,8 +31,15 @@ CONF = keystone.conf.CONF class User(sql.ModelBase, 
sql.ModelDictMixinWithExtras): __tablename__ = 'user' - attributes = ['id', 'name', 'domain_id', 'password', 'enabled', - 'default_project_id', 'password_expires_at'] + attributes = [ + 'id', + 'name', + 'domain_id', + 'password', + 'enabled', + 'default_project_id', + 'password_expires_at', + ] readonly_attributes = ['id', 'password_expires_at', 'password'] resource_options_registry = iro.USER_OPTIONS_REGISTRY id = sql.Column(sql.String(64), primary_key=True) @@ -46,25 +53,35 @@ class User(sql.ModelBase, sql.ModelDictMixinWithExtras): cascade='all,delete,delete-orphan', lazy='subquery', backref='user', - collection_class=collections.attribute_mapped_collection('option_id')) - local_user = orm.relationship('LocalUser', uselist=False, - single_parent=True, lazy='joined', - cascade='all,delete-orphan', backref='user') - federated_users = orm.relationship('FederatedUser', - single_parent=True, - lazy='joined', - cascade='all,delete-orphan', - backref='user') - nonlocal_user = orm.relationship('NonLocalUser', - uselist=False, - single_parent=True, - lazy='joined', - cascade='all,delete-orphan', - backref='user') + collection_class=collections.attribute_mapped_collection('option_id'), + ) + local_user = orm.relationship( + 'LocalUser', + uselist=False, + single_parent=True, + lazy='joined', + cascade='all,delete-orphan', + backref='user', + ) + federated_users = orm.relationship( + 'FederatedUser', + single_parent=True, + lazy='joined', + cascade='all,delete-orphan', + backref='user', + ) + nonlocal_user = orm.relationship( + 'NonLocalUser', + uselist=False, + single_parent=True, + lazy='joined', + cascade='all,delete-orphan', + backref='user', + ) expiring_user_group_memberships = orm.relationship( 'ExpiringUserGroupMembership', cascade='all, delete-orphan', - backref="user" + backref="user", ) created_at = sql.Column(sql.DateTime, nullable=True) last_active_at = sql.Column(sql.Date, nullable=True) @@ -176,14 +193,16 @@ class User(sql.ModelBase, 
sql.ModelDictMixinWithExtras): return getattr( self.get_resource_option(iro.IGNORE_PASSWORD_EXPIRY_OPT.option_id), 'option_value', - False) + False, + ) def _get_password_expires_at(self, created_at): expires_days = CONF.security_compliance.password_expires_days if not self._password_expiry_exempt(): if expires_days: - expired_date = (created_at + - datetime.timedelta(days=expires_days)) + expired_date = created_at + datetime.timedelta( + days=expires_days + ) return expired_date.replace(microsecond=0) return None @@ -198,12 +217,15 @@ class User(sql.ModelBase, sql.ModelDictMixinWithExtras): """Return whether user is enabled or not.""" if self._enabled: max_days = ( - CONF.security_compliance.disable_user_account_days_inactive) + CONF.security_compliance.disable_user_account_days_inactive + ) inactivity_exempt = getattr( self.get_resource_option( - iro.IGNORE_USER_INACTIVITY_OPT.option_id), + iro.IGNORE_USER_INACTIVITY_OPT.option_id + ), 'option_value', - False) + False, + ) last_active = self.last_active_at if not last_active and self.created_at: last_active = self.created_at.date() @@ -216,8 +238,10 @@ class User(sql.ModelBase, sql.ModelDictMixinWithExtras): @enabled.setter def enabled(self, value): - if (value and - CONF.security_compliance.disable_user_account_days_inactive): + if ( + value + and CONF.security_compliance.disable_user_account_days_inactive + ): self.last_active_at = datetime.datetime.utcnow().date() if value and self.local_user: self.local_user.failed_auth_count = 0 @@ -279,27 +303,37 @@ class LocalUser(sql.ModelBase, sql.ModelDictMixin): user_id = sql.Column(sql.String(64), nullable=False) domain_id = sql.Column(sql.String(64), nullable=False) name = sql.Column(sql.String(255), nullable=False) - passwords = orm.relationship('Password', - single_parent=True, - cascade='all,delete-orphan', - lazy='joined', - backref='local_user', - order_by='Password.created_at_int') + passwords = orm.relationship( + 'Password', + single_parent=True, + 
cascade='all,delete-orphan', + lazy='joined', + backref='local_user', + order_by='Password.created_at_int', + ) failed_auth_count = sql.Column(sql.Integer, nullable=True) failed_auth_at = sql.Column(sql.DateTime, nullable=True) __table_args__ = ( sql.UniqueConstraint('user_id'), sql.UniqueConstraint('domain_id', 'name'), - sqlalchemy.ForeignKeyConstraint(['user_id', 'domain_id'], - ['user.id', 'user.domain_id'], - onupdate='CASCADE', ondelete='CASCADE') + sqlalchemy.ForeignKeyConstraint( + ['user_id', 'domain_id'], + ['user.id', 'user.domain_id'], + onupdate='CASCADE', + ondelete='CASCADE', + ), ) class Password(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'password' - attributes = ['id', 'local_user_id', 'password_hash', 'created_at', - 'expires_at'] + attributes = [ + 'id', + 'local_user_id', + 'password_hash', + 'created_at', + 'expires_at', + ] id = sql.Column(sql.Integer, primary_key=True) local_user_id = sql.Column( sql.Integer, @@ -314,8 +348,12 @@ class Password(sql.ModelBase, sql.ModelDictMixin): # big integers. The old datetime columns and their corresponding attributes # in the model are no longer required. # created_at default set here to safe guard in case it gets missed - _created_at = sql.Column('created_at', sql.DateTime, nullable=False, - default=datetime.datetime.utcnow) + _created_at = sql.Column( + 'created_at', + sql.DateTime, + nullable=False, + default=datetime.datetime.utcnow, + ) _expires_at = sql.Column('expires_at', sql.DateTime, nullable=True) # set the default to 0, a 0 indicates it is unset. 
created_at_int = sql.Column( @@ -353,8 +391,14 @@ class Password(sql.ModelBase, sql.ModelDictMixin): class FederatedUser(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'federated_user' - attributes = ['id', 'user_id', 'idp_id', 'protocol_id', 'unique_id', - 'display_name'] + attributes = [ + 'id', + 'user_id', + 'idp_id', + 'protocol_id', + 'unique_id', + 'display_name', + ] id = sql.Column(sql.Integer, primary_key=True) user_id = sql.Column( sql.String(64), @@ -371,10 +415,11 @@ class FederatedUser(sql.ModelBase, sql.ModelDictMixin): display_name = sql.Column(sql.String(255), nullable=True) __table_args__ = ( sql.UniqueConstraint('idp_id', 'protocol_id', 'unique_id'), - sqlalchemy.ForeignKeyConstraint(['protocol_id', 'idp_id'], - ['federation_protocol.id', - 'federation_protocol.idp_id'], - ondelete='CASCADE') + sqlalchemy.ForeignKeyConstraint( + ['protocol_id', 'idp_id'], + ['federation_protocol.id', 'federation_protocol.idp_id'], + ondelete='CASCADE', + ), ) @@ -389,8 +434,12 @@ class NonLocalUser(sql.ModelBase, sql.ModelDictMixin): __table_args__ = ( sql.UniqueConstraint('user_id'), sqlalchemy.ForeignKeyConstraint( - ['user_id', 'domain_id'], ['user.id', 'user.domain_id'], - onupdate='CASCADE', ondelete='CASCADE'),) + ['user_id', 'domain_id'], + ['user.id', 'user.domain_id'], + onupdate='CASCADE', + ondelete='CASCADE', + ), + ) class Group(sql.ModelBase, sql.ModelDictMixinWithExtras): @@ -404,7 +453,7 @@ class Group(sql.ModelBase, sql.ModelDictMixinWithExtras): expiring_user_group_memberships = orm.relationship( 'ExpiringUserGroupMembership', cascade='all, delete-orphan', - backref="group" + backref="group", ) # Unique constraint across two columns to create the separation # rather than just only 'name' being unique @@ -415,28 +464,29 @@ class UserGroupMembership(sql.ModelBase, sql.ModelDictMixin): """Group membership join table.""" __tablename__ = 'user_group_membership' - user_id = sql.Column(sql.String(64), - sql.ForeignKey('user.id'), - 
primary_key=True) - group_id = sql.Column(sql.String(64), - sql.ForeignKey('group.id'), - primary_key=True) + user_id = sql.Column( + sql.String(64), sql.ForeignKey('user.id'), primary_key=True + ) + group_id = sql.Column( + sql.String(64), sql.ForeignKey('group.id'), primary_key=True + ) class ExpiringUserGroupMembership(sql.ModelBase, sql.ModelDictMixin): """Expiring group membership through federation mapping rules.""" __tablename__ = 'expiring_user_group_membership' - user_id = sql.Column(sql.String(64), - sql.ForeignKey('user.id'), - primary_key=True) - group_id = sql.Column(sql.String(64), - sql.ForeignKey('group.id'), - primary_key=True) - idp_id = sql.Column(sql.String(64), - sql.ForeignKey('identity_provider.id', - ondelete='CASCADE'), - primary_key=True) + user_id = sql.Column( + sql.String(64), sql.ForeignKey('user.id'), primary_key=True + ) + group_id = sql.Column( + sql.String(64), sql.ForeignKey('group.id'), primary_key=True + ) + idp_id = sql.Column( + sql.String(64), + sql.ForeignKey('identity_provider.id', ondelete='CASCADE'), + primary_key=True, + ) last_verified = sql.Column(sql.DateTime, nullable=False) @hybrid_property @@ -453,11 +503,13 @@ class ExpiringUserGroupMembership(sql.ModelBase, sql.ModelDictMixin): class UserOption(sql.ModelBase): __tablename__ = 'user_option' - user_id = sql.Column(sql.String(64), sql.ForeignKey('user.id', - ondelete='CASCADE'), nullable=False, - primary_key=True) - option_id = sql.Column(sql.String(4), nullable=False, - primary_key=True) + user_id = sql.Column( + sql.String(64), + sql.ForeignKey('user.id', ondelete='CASCADE'), + nullable=False, + primary_key=True, + ) + option_id = sql.Column(sql.String(4), nullable=False, primary_key=True) option_value = sql.Column(sql.JsonBlob, nullable=True) def __init__(self, option_id, option_value): diff --git a/keystone/identity/core.py b/keystone/identity/core.py index f440bfc31f..0eff2aefc9 100644 --- a/keystone/identity/core.py +++ b/keystone/identity/core.py @@ -50,8 
+50,9 @@ PROVIDERS = provider_api.ProviderAPIs MEMOIZE = cache.get_memoization_decorator(group='identity') ID_MAPPING_REGION = cache.create_region(name='id mapping') -MEMOIZE_ID_MAPPING = cache.get_memoization_decorator(group='identity', - region=ID_MAPPING_REGION) +MEMOIZE_ID_MAPPING = cache.get_memoization_decorator( + group='identity', region=ID_MAPPING_REGION +) DOMAIN_CONF_FHEAD = 'keystone.' DOMAIN_CONF_FTAIL = '.conf' @@ -72,13 +73,12 @@ def get_driver(namespace, driver_name, *args): looking for additional configuration options required by the driver. """ try: - driver_manager = stevedore.DriverManager(namespace, - driver_name, - invoke_on_load=False, - invoke_args=args) + driver_manager = stevedore.DriverManager( + namespace, driver_name, invoke_on_load=False, invoke_args=args + ) return driver_manager.driver except stevedore.exception.NoMatches: - msg = (_('Unable to find %(name)r driver in %(namespace)r.')) + msg = _('Unable to find %(name)r driver in %(namespace)r.') raise ImportError(msg % {'name': driver_name, 'namespace': namespace}) @@ -105,9 +105,11 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): lock = threading.Lock() def _load_driver(self, domain_config): - return manager.load_driver(Manager.driver_namespace, - domain_config['cfg'].identity.driver, - domain_config['cfg']) + return manager.load_driver( + Manager.driver_namespace, + domain_config['cfg'].identity.driver, + domain_config['cfg'], + ) def _load_config_from_file(self, resource_api, file_list, domain_name): @@ -118,8 +120,9 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): would cause there to be more than one sql driver. """ - if (new_config['driver'].is_sql and - (self.driver.is_sql or self._any_sql)): + if new_config['driver'].is_sql and ( + self.driver.is_sql or self._any_sql + ): # The addition of this driver would cause us to have more than # one sql driver, so raise an exception. 
raise exception.MultipleSQLDriversInConfig(source=config_file) @@ -128,8 +131,10 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): try: domain_ref = resource_api.get_domain_by_name(domain_name) except exception.DomainNotFound: - LOG.warning('Invalid domain name (%s) found in config file name', - domain_name) + LOG.warning( + 'Invalid domain name (%s) found in config file name', + domain_name, + ) return # Create a new entry in the domain config dict, which contains @@ -140,9 +145,12 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): domain_config = {} domain_config['cfg'] = cfg.ConfigOpts() keystone.conf.configure(conf=domain_config['cfg']) - domain_config['cfg'](args=[], project='keystone', - default_config_files=file_list, - default_config_dirs=[]) + domain_config['cfg']( + args=[], + project='keystone', + default_config_files=file_list, + default_config_dirs=[], + ) domain_config['driver'] = self._load_driver(domain_config) _assert_no_more_than_one_sql_driver(domain_config, file_list) self[domain_ref['id']] = domain_config @@ -165,23 +173,34 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): """ conf_dir = CONF.identity.domain_config_dir if not os.path.exists(conf_dir): - LOG.warning('Unable to locate domain config directory: %s', - conf_dir) + LOG.warning( + 'Unable to locate domain config directory: %s', conf_dir + ) return for r, d, f in os.walk(conf_dir): for fname in f: - if (fname.startswith(DOMAIN_CONF_FHEAD) and - fname.endswith(DOMAIN_CONF_FTAIL)): + if fname.startswith(DOMAIN_CONF_FHEAD) and fname.endswith( + DOMAIN_CONF_FTAIL + ): if fname.count('.') >= 2: self._load_config_from_file( - resource_api, [os.path.join(r, fname)], - fname[len(DOMAIN_CONF_FHEAD): - -len(DOMAIN_CONF_FTAIL)]) + resource_api, + [os.path.join(r, fname)], + fname[ + len(DOMAIN_CONF_FHEAD) : -len( + DOMAIN_CONF_FTAIL + ) + ], + ) else: - LOG.debug(('Ignoring file (%s) while scanning domain ' - 'config directory'), - fname) + LOG.debug( + ( + 
'Ignoring file (%s) while scanning domain ' + 'config directory' + ), + fname, + ) def _load_config_from_database(self, domain_id, specific_config): @@ -211,22 +230,31 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): domain_registered = 'Unknown' for attempt in range(REGISTRATION_ATTEMPTS): if PROVIDERS.domain_config_api.obtain_registration( - domain_id, SQL_DRIVER): - LOG.debug('Domain %s successfully registered to use the ' - 'SQL driver.', domain_id) + domain_id, SQL_DRIVER + ): + LOG.debug( + 'Domain %s successfully registered to use the ' + 'SQL driver.', + domain_id, + ) return # We failed to register our use, let's find out who is using it try: domain_registered = ( PROVIDERS.domain_config_api.read_registration( - SQL_DRIVER)) + SQL_DRIVER + ) + ) except exception.ConfigRegistrationNotFound: - msg = ('While attempting to register domain %(domain)s to ' - 'use the SQL driver, another process released it, ' - 'retrying (attempt %(attempt)s).') - LOG.debug(msg, {'domain': domain_id, - 'attempt': attempt + 1}) + msg = ( + 'While attempting to register domain %(domain)s to ' + 'use the SQL driver, another process released it, ' + 'retrying (attempt %(attempt)s).' + ) + LOG.debug( + msg, {'domain': domain_id, 'attempt': attempt + 1} + ) continue if domain_registered == domain_id: @@ -235,10 +263,13 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): # in the middle of deleting this domain, we know the domain # is already disabled and hence telling the caller that we # are registered is benign. - LOG.debug('While attempting to register domain %s to use ' - 'the SQL driver, found that another process had ' - 'already registered this domain. This is normal ' - 'in multi-process configurations.', domain_id) + LOG.debug( + 'While attempting to register domain %s to use ' + 'the SQL driver, found that another process had ' + 'already registered this domain. 
This is normal ' + 'in multi-process configurations.', + domain_id, + ) return # So we don't have it, but someone else does...let's check that @@ -246,39 +277,52 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): try: PROVIDERS.resource_api.get_domain(domain_registered) except exception.DomainNotFound: - msg = ('While attempting to register domain %(domain)s to ' - 'use the SQL driver, found that it was already ' - 'registered to a domain that no longer exists ' - '(%(old_domain)s). Removing this stale ' - 'registration and retrying (attempt %(attempt)s).') - LOG.debug(msg, {'domain': domain_id, - 'old_domain': domain_registered, - 'attempt': attempt + 1}) + msg = ( + 'While attempting to register domain %(domain)s to ' + 'use the SQL driver, found that it was already ' + 'registered to a domain that no longer exists ' + '(%(old_domain)s). Removing this stale ' + 'registration and retrying (attempt %(attempt)s).' + ) + LOG.debug( + msg, + { + 'domain': domain_id, + 'old_domain': domain_registered, + 'attempt': attempt + 1, + }, + ) PROVIDERS.domain_config_api.release_registration( - domain_registered, type=SQL_DRIVER) + domain_registered, type=SQL_DRIVER + ) continue # The domain is valid, so we really do have an attempt at more # than one SQL driver. details = ( - _('Config API entity at /domains/%s/config') % domain_id) + _('Config API entity at /domains/%s/config') % domain_id + ) raise exception.MultipleSQLDriversInConfig(source=details) # We fell out of the loop without either registering our domain or # being able to find who has it...either we were very very very # unlucky or something is awry. 
- msg = _('Exceeded attempts to register domain %(domain)s to use ' - 'the SQL driver, the last domain that appears to have ' - 'had it is %(last_domain)s, giving up') % { - 'domain': domain_id, 'last_domain': domain_registered} + msg = _( + 'Exceeded attempts to register domain %(domain)s to use ' + 'the SQL driver, the last domain that appears to have ' + 'had it is %(last_domain)s, giving up' + ) % {'domain': domain_id, 'last_domain': domain_registered} raise exception.UnexpectedError(msg) domain_config = {} domain_config['cfg'] = cfg.ConfigOpts() keystone.conf.configure(conf=domain_config['cfg']) - domain_config['cfg'](args=[], project='keystone', - default_config_files=[], - default_config_dirs=[]) + domain_config['cfg']( + args=[], + project='keystone', + default_config_files=[], + default_config_dirs=[], + ) # Try to identify the required driver for the domain to let it register # supported configuration options. In difference to the FS based @@ -286,12 +330,12 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): # thus require special treatment. try: driver_name = specific_config.get("identity", {}).get( - "driver", domain_config["cfg"].identity.driver) + "driver", domain_config["cfg"].identity.driver + ) # For the non in-tree drivers ... if driver_name not in ["sql", "ldap"]: # Locate the driver without invoking ... - driver = get_driver( - Manager.driver_namespace, driver_name) + driver = get_driver(Manager.driver_namespace, driver_name) # Check whether it wants to register additional config options # ... if hasattr(driver, "register_opts"): @@ -305,7 +349,8 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): # register config options with the DB configuration loading branch. LOG.debug( f"Exception during attempt to load domain specific " - f"configuration options: {ex}") + f"configuration options: {ex}" + ) # Override any options that have been passed in as specified in the # database. 
@@ -319,7 +364,8 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): # to complete the process properly. try: domain_config['cfg'].set_override( - option, specific_config[group][option], group) + option, specific_config[group][option], group + ) except (cfg.NoSuchOptError, cfg.NoSuchGroupError): # Error to register config overrides for wrong driver. This # is not worth of logging since it is a normal case during @@ -331,8 +377,9 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): _assert_no_more_than_one_sql_driver(domain_id, domain_config) self[domain_id] = domain_config - def _setup_domain_drivers_from_database(self, standard_driver, - resource_api): + def _setup_domain_drivers_from_database( + self, standard_driver, resource_api + ): """Read domain specific configuration from database and load drivers. Domain configurations are stored in the domain-config backend, @@ -346,22 +393,27 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): """ for domain in resource_api.list_domains(): domain_config_options = ( - PROVIDERS.domain_config_api. 
- get_config_with_sensitive_info(domain['id'])) + PROVIDERS.domain_config_api.get_config_with_sensitive_info( + domain['id'] + ) + ) if domain_config_options: - self._load_config_from_database(domain['id'], - domain_config_options) + self._load_config_from_database( + domain['id'], domain_config_options + ) def setup_domain_drivers(self, standard_driver, resource_api): # This is called by the api call wrapper self.driver = standard_driver if CONF.identity.domain_configurations_from_database: - self._setup_domain_drivers_from_database(standard_driver, - resource_api) + self._setup_domain_drivers_from_database( + standard_driver, resource_api + ) else: - self._setup_domain_drivers_from_files(standard_driver, - resource_api) + self._setup_domain_drivers_from_files( + standard_driver, resource_api + ) self.configured = True def get_domain_driver(self, domain_id): @@ -384,8 +436,7 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): # read. if self.configured: if domain_id in self: - self[domain_id]['driver'] = ( - self._load_driver(self[domain_id])) + self[domain_id]['driver'] = self._load_driver(self[domain_id]) else: # The standard driver self.driver = self.driver() @@ -413,8 +464,10 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): process, next time it accesses the driver it will pickup the new one. """ - if (not CONF.identity.domain_specific_drivers_enabled or - not CONF.identity.domain_configurations_from_database): + if ( + not CONF.identity.domain_specific_drivers_enabled + or not CONF.identity.domain_configurations_from_database + ): # If specific drivers are not enabled, then there is nothing to do. # If we are not storing the configurations in the database, then # we'll only re-read the domain specific config files on startup @@ -422,15 +475,20 @@ class DomainConfigs(provider_api.ProviderAPIMixin, dict): return latest_domain_config = ( - PROVIDERS.domain_config_api. 
- get_config_with_sensitive_info(domain_id)) + PROVIDERS.domain_config_api.get_config_with_sensitive_info( + domain_id + ) + ) domain_config_in_use = domain_id in self if latest_domain_config: - if (not domain_config_in_use or - latest_domain_config != self[domain_id]['cfg_overrides']): - self._load_config_from_database(domain_id, - latest_domain_config) + if ( + not domain_config_in_use + or latest_domain_config != self[domain_id]['cfg_overrides'] + ): + self._load_config_from_database( + domain_id, latest_domain_config + ) elif domain_config_in_use: # The domain specific config has been deleted, so should remove the # specific driver for this domain. @@ -456,10 +514,13 @@ def domains_configured(f): to each call, and if requires load them, """ + @functools.wraps(f) def wrapper(self, *args, **kwargs): - if (not self.domain_configs.configured and - CONF.identity.domain_specific_drivers_enabled): + if ( + not self.domain_configs.configured + and CONF.identity.domain_specific_drivers_enabled + ): # If domain specific driver has not been configured, acquire the # lock and proceed with loading the driver. with self.domain_configs.lock: @@ -467,13 +528,16 @@ def domains_configured(f): # completed domain config. 
if not self.domain_configs.configured: self.domain_configs.setup_domain_drivers( - self.driver, PROVIDERS.resource_api) + self.driver, PROVIDERS.resource_api + ) return f(self, *args, **kwargs) + return wrapper def exception_translated(exception_type): """Wrap API calls to map to correct exception.""" + def _exception_translated(f): @functools.wraps(f) def wrapper(self, *args, **kwargs): @@ -488,7 +552,9 @@ def exception_translated(exception_type): raise AssertionError(_('Invalid user / password')) else: raise + return wrapper + return _exception_translated @@ -542,8 +608,9 @@ class Manager(manager.Manager): super(Manager, self).__init__(CONF.identity.driver) self.domain_configs = DomainConfigs() notifications.register_event_callback( - notifications.ACTIONS.internal, notifications.DOMAIN_DELETED, - self._domain_deleted + notifications.ACTIONS.internal, + notifications.DOMAIN_DELETED, + self._domain_deleted, ) self.event_callbacks = { notifications.ACTIONS.deleted: { @@ -551,8 +618,7 @@ class Manager(manager.Manager): }, } - def _domain_deleted(self, service, resource_type, operation, - payload): + def _domain_deleted(self, service, resource_type, operation, payload): domain_id = payload['resource_info'] driver = self._select_identity_driver(domain_id) @@ -564,10 +630,14 @@ class Manager(manager.Manager): try: self.delete_group(group['id']) except exception.GroupNotFound: - LOG.debug(('Group %(groupid)s not found when deleting ' - 'domain contents for %(domainid)s, continuing ' - 'with cleanup.'), - {'groupid': group['id'], 'domainid': domain_id}) + LOG.debug( + ( + 'Group %(groupid)s not found when deleting ' + 'domain contents for %(domainid)s, continuing ' + 'with cleanup.' 
+ ), + {'groupid': group['id'], 'domainid': domain_id}, + ) # And finally, delete the users themselves user_refs = self.list_users(domain_scope=domain_id) @@ -579,13 +649,18 @@ class Manager(manager.Manager): else: self.delete_user(user['id']) except exception.UserNotFound: - LOG.debug(('User %(userid)s not found when deleting domain ' - 'contents for %(domainid)s, continuing with ' - 'cleanup.'), - {'userid': user['id'], 'domainid': domain_id}) + LOG.debug( + ( + 'User %(userid)s not found when deleting domain ' + 'contents for %(domainid)s, continuing with ' + 'cleanup.' + ), + {'userid': user['id'], 'domainid': domain_id}, + ) - def _unset_default_project(self, service, resource_type, operation, - payload): + def _unset_default_project( + self, service, resource_type, operation, payload + ): """Callback, clears user default_project_id after project deletion. Notifications are used to unset a user's default project because @@ -609,8 +684,7 @@ class Manager(manager.Manager): pass # Domain ID normalization methods - def _set_domain_id_and_mapping(self, ref, domain_id, driver, - entity_type): + def _set_domain_id_and_mapping(self, ref, domain_id, driver, entity_type): """Patch the domain_id/public_id into the resulting entity(ies). 
:param ref: the entity or list of entities to post process @@ -643,29 +717,38 @@ class Manager(manager.Manager): # a classic case would be when running with a single SQL driver return ref - LOG.debug('ID Mapping - Domain ID: %(domain)s, ' - 'Default Driver: %(driver)s, ' - 'Domains: %(aware)s, UUIDs: %(generate)s, ' - 'Compatible IDs: %(compat)s', - {'domain': domain_id, - 'driver': (driver == self.driver), - 'aware': driver.is_domain_aware(), - 'generate': driver.generates_uuids(), - 'compat': CONF.identity_mapping.backward_compatible_ids}) + LOG.debug( + 'ID Mapping - Domain ID: %(domain)s, ' + 'Default Driver: %(driver)s, ' + 'Domains: %(aware)s, UUIDs: %(generate)s, ' + 'Compatible IDs: %(compat)s', + { + 'domain': domain_id, + 'driver': (driver == self.driver), + 'aware': driver.is_domain_aware(), + 'generate': driver.generates_uuids(), + 'compat': CONF.identity_mapping.backward_compatible_ids, + }, + ) if isinstance(ref, dict): return self._set_domain_id_and_mapping_for_single_ref( - ref, domain_id, driver, entity_type, conf) + ref, domain_id, driver, entity_type, conf + ) elif isinstance(ref, list): return self._set_domain_id_and_mapping_for_list( - ref, domain_id, driver, entity_type, conf) + ref, domain_id, driver, entity_type, conf + ) else: raise ValueError(_('Expected dict or list: %s') % type(ref)) def _needs_post_processing(self, driver): """Return whether entity from driver needs domain added or mapping.""" - return (driver is not self.driver or not driver.generates_uuids() or - not driver.is_domain_aware()) + return ( + driver is not self.driver + or not driver.generates_uuids() + or not driver.is_domain_aware() + ) def _insert_new_public_id(self, local_entity, ref, driver): # Need to create a mapping. 
If the driver generates UUIDs @@ -674,11 +757,13 @@ class Manager(manager.Manager): if driver.generates_uuids(): public_id = ref['id'] ref['id'] = PROVIDERS.id_mapping_api.create_id_mapping( - local_entity, public_id) + local_entity, public_id + ) LOG.debug('Created new mapping to public ID: %s', ref['id']) - def _set_domain_id_and_mapping_for_single_ref(self, ref, domain_id, - driver, entity_type, conf): + def _set_domain_id_and_mapping_for_single_ref( + self, ref, domain_id, driver, entity_type, conf + ): LOG.debug('Local ID: %s', ref['id']) ref = ref.copy() @@ -688,20 +773,22 @@ class Manager(manager.Manager): ref['domain_id'] = domain_id if self._is_mapping_needed(driver): - local_entity = {'domain_id': ref['domain_id'], - 'local_id': ref['id'], - 'entity_type': entity_type} + local_entity = { + 'domain_id': ref['domain_id'], + 'local_id': ref['id'], + 'entity_type': entity_type, + } public_id = PROVIDERS.id_mapping_api.get_public_id(local_entity) if public_id: ref['id'] = public_id - LOG.debug('Found existing mapping to public ID: %s', - ref['id']) + LOG.debug('Found existing mapping to public ID: %s', ref['id']) else: self._insert_new_public_id(local_entity, ref, driver) return ref - def _set_domain_id_and_mapping_for_list(self, ref_list, domain_id, driver, - entity_type, conf): + def _set_domain_id_and_mapping_for_list( + self, ref_list, domain_id, driver, entity_type, conf + ): """Set domain id and mapping for a list of refs. The method modifies refs in-place. @@ -730,7 +817,8 @@ class Manager(manager.Manager): # fetch all mappings for the domain, lookup the user at the map built # at previous step and replace his id. 
domain_mappings = PROVIDERS.id_mapping_api.get_domain_mapping_list( - domain_id, entity_type=entity_type) + domain_id, entity_type=entity_type + ) for _mapping in domain_mappings: idx = (_mapping.local_id, _mapping.entity_type, _mapping.domain_id) try: @@ -745,9 +833,11 @@ class Manager(manager.Manager): # at this point, all known refs were granted a public_id. For the refs # left, there are no mappings. They need to be created. for ref in refs_map.values(): - local_entity = {'domain_id': ref['domain_id'], - 'local_id': ref['id'], - 'entity_type': entity_type} + local_entity = { + 'domain_id': ref['domain_id'], + 'local_id': ref['id'], + 'entity_type': entity_type, + } self._insert_new_public_id(local_entity, ref, driver) return ref_list @@ -762,9 +852,10 @@ class Manager(manager.Manager): current LDAP) """ is_not_default_driver = driver is not self.driver - return (is_not_default_driver or ( - not driver.generates_uuids() and - not CONF.identity_mapping.backward_compatible_ids)) + return is_not_default_driver or ( + not driver.generates_uuids() + and not CONF.identity_mapping.backward_compatible_ids + ) def _clear_domain_id_if_domain_unaware(self, driver, ref): """Clear domain_id details if driver is not domain aware.""" @@ -790,22 +881,27 @@ class Manager(manager.Manager): if domain_id is None: driver = self.driver else: - driver = (self.domain_configs.get_domain_driver(domain_id) or - self.driver) + driver = ( + self.domain_configs.get_domain_driver(domain_id) or self.driver + ) # If the driver is not domain aware (e.g. LDAP) then check to # ensure we are not mapping multiple domains onto it - the only way # that would happen is that the default driver is LDAP and the # domain is anything other than None or the default domain. 
- if (not driver.is_domain_aware() and driver == self.driver and - domain_id != CONF.identity.default_domain_id and - domain_id is not None): - LOG.warning('Found multiple domains being mapped to a ' - 'driver that does not support that (e.g. ' - 'LDAP) - Domain ID: %(domain)s, ' - 'Default Driver: %(driver)s', - {'domain': domain_id, - 'driver': (driver == self.driver)}) + if ( + not driver.is_domain_aware() + and driver == self.driver + and domain_id != CONF.identity.default_domain_id + and domain_id is not None + ): + LOG.warning( + 'Found multiple domains being mapped to a ' + 'driver that does not support that (e.g. ' + 'LDAP) - Domain ID: %(domain)s, ' + 'Default Driver: %(driver)s', + {'domain': domain_id, 'driver': (driver == self.driver)}, + ) raise exception.DomainNotFound(domain_id=domain_id) return driver @@ -835,7 +931,8 @@ class Manager(manager.Manager): return ( local_id_ref['domain_id'], self._select_identity_driver(local_id_ref['domain_id']), - local_id_ref['local_id']) + local_id_ref['local_id'], + ) # So either we are using multiple drivers but the public ID is invalid # (and hence was not found in the mapping table), or the public ID is @@ -865,7 +962,8 @@ class Manager(manager.Manager): return ( local_id_ref['domain_id'], driver, - local_id_ref['local_id']) + local_id_ref['local_id'], + ) else: raise exception.PublicIDNotFound(id=public_id) @@ -880,7 +978,8 @@ class Manager(manager.Manager): return (conf.default_domain_id, driver, public_id) def _assert_user_and_group_in_same_backend( - self, user_entity_id, user_driver, group_entity_id, group_driver): + self, user_entity_id, user_driver, group_entity_id, group_driver + ): """Ensure that user and group IDs are backed by the same backend. 
Raise a CrossBackendNotAllowed exception if they are not from the same @@ -895,19 +994,23 @@ class Manager(manager.Manager): group_driver.get_group(group_entity_id) # If we get here, then someone is attempting to create a cross # backend membership, which is not allowed. - raise exception.CrossBackendNotAllowed(group_id=group_entity_id, - user_id=user_entity_id) + raise exception.CrossBackendNotAllowed( + group_id=group_entity_id, user_id=user_entity_id + ) def _mark_domain_id_filter_satisfied(self, hints): if hints: for filter in hints.filters: - if (filter['name'] == 'domain_id' and - filter['comparator'] == 'equals'): + if ( + filter['name'] == 'domain_id' + and filter['comparator'] == 'equals' + ): hints.filters.remove(filter) def _ensure_domain_id_in_hints(self, hints, domain_id): - if (domain_id is not None and - not hints.get_exact_filter_by_name('domain_id')): + if domain_id is not None and not hints.get_exact_filter_by_name( + 'domain_id' + ): hints.add_filter('domain_id', domain_id) def _set_list_limit_in_hints(self, hints, driver): @@ -955,11 +1058,13 @@ class Manager(manager.Manager): @domains_configured @exception_translated('assertion') def authenticate(self, user_id, password): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + user_id + ) ref = driver.authenticate(entity_id, password) ref = self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) + ref, domain_id, driver, mapping.EntityType.USER + ) ref = self._shadow_nonlocal_user(ref) PROVIDERS.shadow_users_api.set_last_active_at(ref['id']) return ref @@ -972,10 +1077,13 @@ class Manager(manager.Manager): default_project_id ) if project_ref['is_domain'] is True: - msg = _("User's default project ID cannot be a " - "domain ID: %s") + msg = _( + "User's default project ID cannot be a " + "domain ID: %s" + ) raise exception.ValidationError( - message=(msg % 
default_project_id)) + message=(msg % default_project_id) + ) except exception.ProjectNotFound: # should be idempotent if project is not found so that it is # backward compatible @@ -987,18 +1095,24 @@ class Manager(manager.Manager): try: self.federation_api.get_idp(fed_obj['idp_id']) except exception.IdentityProviderNotFound: - msg = (_("Could not find Identity Provider: %s") - % fed_obj['idp_id']) + msg = ( + _("Could not find Identity Provider: %s") + % fed_obj['idp_id'] + ) raise exception.ValidationError(msg) for protocol in fed_obj['protocols']: try: - self.federation_api.get_protocol(fed_obj['idp_id'], - protocol['protocol_id']) + self.federation_api.get_protocol( + fed_obj['idp_id'], protocol['protocol_id'] + ) except exception.FederatedProtocolNotFound: - msg = (_("Could not find federated protocol " - "%(protocol)s for Identity Provider: %(idp)s.") - % {'protocol': protocol['protocol_id'], - 'idp': fed_obj['idp_id']}) + msg = _( + "Could not find federated protocol " + "%(protocol)s for Identity Provider: %(idp)s." 
+ ) % { + 'protocol': protocol['protocol_id'], + 'idp': fed_obj['idp_id'], + } raise exception.ValidationError(msg) def _create_federated_objects(self, user_ref, fed_obj_list): @@ -1009,10 +1123,9 @@ class Manager(manager.Manager): 'idp_id': fed_obj['idp_id'], 'protocol_id': protocols['protocol_id'], 'unique_id': protocols['unique_id'], - 'display_name': user_ref['name'] + 'display_name': user_ref['name'], } - self.shadow_users_api.create_federated_object( - federated_dict) + self.shadow_users_api.create_federated_object(federated_dict) def _create_user_with_federated_objects(self, user, driver): # If the user did not pass a federated object along inside the user @@ -1044,7 +1157,8 @@ class Manager(manager.Manager): PROVIDERS.resource_api.get_domain(domain_id) self._assert_default_project_id_is_not_domain( - user_ref.get('default_project_id')) + user_ref.get('default_project_id') + ) # For creating a user, the domain is in the object itself domain_id = user_ref['domain_id'] @@ -1057,21 +1171,24 @@ class Manager(manager.Manager): ref = self._create_user_with_federated_objects(user, driver) notifications.Audit.created(self._USER, user['id'], initiator) return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) + ref, domain_id, driver, mapping.EntityType.USER + ) @domains_configured @exception_translated('user') @MEMOIZE def get_user(self, user_id): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + user_id + ) ref = driver.get_user(entity_id) # Add user's federated objects fed_objects = self.shadow_users_api.get_federated_objects(user_id) if fed_objects: ref['federated'] = fed_objects return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) + ref, domain_id, driver, mapping.EntityType.USER + ) def assert_user_enabled(self, user_id, user=None): """Assert the user and the user's domain are 
enabled. @@ -1091,7 +1208,8 @@ class Manager(manager.Manager): driver = self._select_identity_driver(domain_id) ref = driver.get_user_by_name(user_name, domain_id) return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) + ref, domain_id, driver, mapping.EntityType.USER + ) def _translate_expired_password_hints(self, hints): """Clean Up Expired Password Hints. @@ -1107,9 +1225,14 @@ class Manager(manager.Manager): or ValidationTimeStampError exception respectively if invalid. """ - operators = {'lt': operator.lt, 'gt': operator.gt, - 'eq': operator.eq, 'lte': operator.le, - 'gte': operator.ge, 'neq': operator.ne} + operators = { + 'lt': operator.lt, + 'gt': operator.gt, + 'eq': operator.eq, + 'lte': operator.le, + 'gte': operator.ge, + 'neq': operator.ne, + } for filter_ in hints.filters: if 'password_expires_at' == filter_['name']: # password_expires_at must be in the format @@ -1143,7 +1266,8 @@ class Manager(manager.Manager): if filter_['name'] == 'name': fed_hints = copy.deepcopy(hints) fed_res = PROVIDERS.shadow_users_api.get_federated_users( - fed_hints) + fed_hints + ) break return driver.list_users(hints) + fed_res @@ -1164,7 +1288,8 @@ class Manager(manager.Manager): hints = self._translate_expired_password_hints(hints) ref_list = self._handle_shadow_and_local_users(driver, hints) return self._set_domain_id_and_mapping( - ref_list, domain_scope, driver, mapping.EntityType.USER) + ref_list, domain_scope, driver, mapping.EntityType.USER + ) def _require_matching_domain_id(self, new_ref, orig_ref): """Ensure the current domain ID matches the reference one, if any. 
@@ -1192,7 +1317,8 @@ class Manager(manager.Manager): del user['federated'] user = driver.update_user(entity_id, user) fed_objects = self.shadow_users_api.get_federated_objects( - user['id']) + user['id'] + ) if fed_objects: user['federated'] = fed_objects return user @@ -1227,21 +1353,25 @@ class Manager(manager.Manager): user.pop('id') self._assert_default_project_id_is_not_domain( - user_ref.get('default_project_id')) + user_ref.get('default_project_id') + ) - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + user_id + ) user = self._clear_domain_id_if_domain_unaware(driver, user) self.get_user.invalidate(self, old_user_ref['id']) - self.get_user_by_name.invalidate(self, old_user_ref['name'], - old_user_ref['domain_id']) + self.get_user_by_name.invalidate( + self, old_user_ref['name'], old_user_ref['domain_id'] + ) ref = self._update_user_with_federated_objects(user, driver, entity_id) notifications.Audit.updated(self._USER, user_id, initiator) - enabled_change = ((user.get('enabled') is False) and - user['enabled'] != old_user_ref.get('enabled')) + enabled_change = (user.get('enabled') is False) and user[ + 'enabled' + ] != old_user_ref.get('enabled') if enabled_change or user.get('password') is not None: self._persist_revocation_event_for_user(user_id) reason = ( @@ -1253,13 +1383,15 @@ class Manager(manager.Manager): notifications.invalidate_token_cache_notification(reason) return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) + ref, domain_id, driver, mapping.EntityType.USER + ) @domains_configured @exception_translated('user') def delete_user(self, user_id, initiator=None): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + user_id + ) # Get user details to invalidate the cache. 
user_old = self.get_user(user_id) @@ -1269,8 +1401,9 @@ class Manager(manager.Manager): driver.delete_user(entity_id) PROVIDERS.assignment_api.delete_user_assignments(user_id) self.get_user.invalidate(self, user_id) - self.get_user_by_name.invalidate(self, user_old['name'], - user_old['domain_id']) + self.get_user_by_name.invalidate( + self, user_old['name'], user_old['domain_id'] + ) PROVIDERS.credential_api.delete_credentials_for_user(user_id) PROVIDERS.id_mapping_api.delete_id_mapping(user_id) @@ -1302,17 +1435,20 @@ class Manager(manager.Manager): notifications.Audit.created(self._GROUP, group['id'], initiator) return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.GROUP) + ref, domain_id, driver, mapping.EntityType.GROUP + ) @domains_configured @exception_translated('group') @MEMOIZE def get_group(self, group_id): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + group_id + ) ref = driver.get_group(entity_id) return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.GROUP) + ref, domain_id, driver, mapping.EntityType.GROUP + ) @domains_configured @exception_translated('group') @@ -1320,15 +1456,17 @@ class Manager(manager.Manager): driver = self._select_identity_driver(domain_id) ref = driver.get_group_by_name(group_name, domain_id) return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.GROUP) + ref, domain_id, driver, mapping.EntityType.GROUP + ) @domains_configured @exception_translated('group') def update_group(self, group_id, group, initiator=None): old_group_ref = self.get_group(group_id) self._require_matching_domain_id(group, old_group_ref) - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + group_id + ) group = 
self._clear_domain_id_if_domain_unaware(driver, group) if 'name' in group: group['name'] = group['name'].strip() @@ -1336,13 +1474,15 @@ class Manager(manager.Manager): self.get_group.invalidate(self, group_id) notifications.Audit.updated(self._GROUP, group_id, initiator) return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.GROUP) + ref, domain_id, driver, mapping.EntityType.GROUP + ) @domains_configured @exception_translated('group') def delete_group(self, group_id, initiator=None): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + group_id + ) roles = PROVIDERS.assignment_api.list_role_assignments( group_id=group_id ) @@ -1379,22 +1519,26 @@ class Manager(manager.Manager): return self._get_domain_driver_and_entity_id(public_id) _domain_id, group_driver, group_entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) + self._get_domain_driver_and_entity_id(group_id) + ) # Get the same info for the user_id, taking care to map any # exceptions correctly - _domain_id, user_driver, user_entity_id = ( - get_entity_info_for_user(user_id)) + _domain_id, user_driver, user_entity_id = get_entity_info_for_user( + user_id + ) self._assert_user_and_group_in_same_backend( - user_entity_id, user_driver, group_entity_id, group_driver) + user_entity_id, user_driver, group_entity_id, group_driver + ) group_driver.add_user_to_group(user_entity_id, group_entity_id) # Invalidate user role assignments cache region, as it may now need to # include role assignments from the specified group to its users assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() - notifications.Audit.added_to(self._GROUP, group_id, self._USER, - user_id, initiator) + notifications.Audit.added_to( + self._GROUP, group_id, self._USER, user_id, initiator + ) @domains_configured @exception_translated('group') @@ -1404,14 +1548,17 @@ class Manager(manager.Manager): 
return self._get_domain_driver_and_entity_id(public_id) _domain_id, group_driver, group_entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) + self._get_domain_driver_and_entity_id(group_id) + ) # Get the same info for the user_id, taking care to map any # exceptions correctly - _domain_id, user_driver, user_entity_id = ( - get_entity_info_for_user(user_id)) + _domain_id, user_driver, user_entity_id = get_entity_info_for_user( + user_id + ) self._assert_user_and_group_in_same_backend( - user_entity_id, user_driver, group_entity_id, group_driver) + user_entity_id, user_driver, group_entity_id, group_driver + ) group_driver.remove_user_from_group(user_entity_id, group_entity_id) self._persist_revocation_event_for_user(user_id) @@ -1423,14 +1570,16 @@ class Manager(manager.Manager): 'Invalidating the token cache because user %(user_id)s was ' 'removed from group %(group_id)s. Authorization will be ' 'calculated and enforced accordingly the next time they ' - 'authenticate or validate a token.' % { + 'authenticate or validate a token.' + % { 'user_id': user_id, 'group_id': group_id, } ) notifications.invalidate_token_cache_notification(reason) - notifications.Audit.removed_from(self._GROUP, group_id, self._USER, - user_id, initiator) + notifications.Audit.removed_from( + self._GROUP, group_id, self._USER, user_id, initiator + ) def _persist_revocation_event_for_user(self, user_id): """Emit a notification to invoke a revocation event callback. 
@@ -1448,8 +1597,9 @@ class Manager(manager.Manager): @domains_configured @exception_translated('user') def list_groups_for_user(self, user_id, hints=None): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + user_id + ) self._set_list_limit_in_hints(hints, driver) hints = hints or driver_hints.Hints() if not driver.is_domain_aware(): @@ -1461,7 +1611,8 @@ class Manager(manager.Manager): if 'membership_expires_at' not in ref: ref['membership_expires_at'] = None return self._set_domain_id_and_mapping( - ref_list, domain_id, driver, mapping.EntityType.GROUP) + ref_list, domain_id, driver, mapping.EntityType.GROUP + ) @domains_configured @exception_translated('group') @@ -1479,13 +1630,15 @@ class Manager(manager.Manager): self._mark_domain_id_filter_satisfied(hints) ref_list = driver.list_groups(hints) return self._set_domain_id_and_mapping( - ref_list, domain_scope, driver, mapping.EntityType.GROUP) + ref_list, domain_scope, driver, mapping.EntityType.GROUP + ) @domains_configured @exception_translated('group') def list_users_in_group(self, group_id, hints=None): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + group_id + ) self._set_list_limit_in_hints(hints, driver) hints = hints or driver_hints.Hints() if not driver.is_domain_aware(): @@ -1495,7 +1648,8 @@ class Manager(manager.Manager): hints = self._translate_expired_password_hints(hints) ref_list = driver.list_users_in_group(entity_id, hints) return self._set_domain_id_and_mapping( - ref_list, domain_id, driver, mapping.EntityType.USER) + ref_list, domain_id, driver, mapping.EntityType.USER + ) @domains_configured @exception_translated('group') @@ -1505,21 +1659,26 @@ class Manager(manager.Manager): return self._get_domain_driver_and_entity_id(public_id) _domain_id, group_driver, group_entity_id 
= ( - self._get_domain_driver_and_entity_id(group_id)) + self._get_domain_driver_and_entity_id(group_id) + ) # Get the same info for the user_id, taking care to map any # exceptions correctly - _domain_id, user_driver, user_entity_id = ( - get_entity_info_for_user(user_id)) + _domain_id, user_driver, user_entity_id = get_entity_info_for_user( + user_id + ) self._assert_user_and_group_in_same_backend( - user_entity_id, user_driver, group_entity_id, group_driver) + user_entity_id, user_driver, group_entity_id, group_driver + ) - return group_driver.check_user_in_group(user_entity_id, - group_entity_id) + return group_driver.check_user_in_group( + user_entity_id, group_entity_id + ) @domains_configured - def change_password(self, user_id, original_password, - new_password, initiator=None): + def change_password( + self, user_id, original_password, new_password, initiator=None + ): # authenticate() will raise an AssertionError if authentication fails try: @@ -1528,15 +1687,17 @@ class Manager(manager.Manager): # If a password has expired, we want users to be able to change it pass - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) + domain_id, driver, entity_id = self._get_domain_driver_and_entity_id( + user_id + ) try: validators.validate_password(new_password) driver.change_password(entity_id, new_password) except exception.PasswordValidationError as ex: audit_reason = reason.Reason(str(ex), str(ex.code)) - notifications.Audit.updated(self._USER, user_id, - initiator, reason=audit_reason) + notifications.Audit.updated( + self._USER, user_id, initiator, reason=audit_reason + ) raise notifications.Audit.updated(self._USER, user_id, initiator) @@ -1563,13 +1724,17 @@ class Manager(manager.Manager): LOG.debug("Trying to update name for federated user [%s].", user) PROVIDERS.shadow_users_api.update_federated_user_display_name( - idp_id, protocol_id, user['id'], user['name']) + idp_id, protocol_id, user['id'], user['name'] + ) user_dict = 
PROVIDERS.shadow_users_api.get_federated_user( - idp_id, protocol_id, user['id']) + idp_id, protocol_id, user['id'] + ) if email: - LOG.debug("Executing the e-mail update for federated user " - "[%s].", user) + LOG.debug( + "Executing the e-mail update for federated user " "[%s].", + user, + ) user_ref = {"email": email} self.update_user(user_dict['id'], user_ref) @@ -1579,14 +1744,11 @@ class Manager(manager.Manager): 'idp_id': idp_id, 'protocol_id': protocol_id, 'unique_id': user['id'], - 'display_name': user['name'] + 'display_name': user['name'], } LOG.debug("Creating federated user [%s].", user) - user_dict = ( - PROVIDERS.shadow_users_api.create_federated_user( - user["domain"]['id'], - federated_dict, email=email - ) + user_dict = PROVIDERS.shadow_users_api.create_federated_user( + user["domain"]['id'], federated_dict, email=email ) PROVIDERS.shadow_users_api.set_last_active_at(user_dict['id']) return user_dict @@ -1607,10 +1769,12 @@ class Manager(manager.Manager): # however we need to update the expiring group memberships. 
if group_ids: for group_id in group_ids: - LOG.info("Adding user [%s] to group [%s].", - user_dict, group_id) + LOG.info( + "Adding user [%s] to group [%s].", user_dict, group_id + ) PROVIDERS.shadow_users_api.add_user_to_group_expires( - user_dict['id'], group_id) + user_dict['id'], group_id + ) return user_dict @@ -1625,14 +1789,20 @@ class MappingManager(manager.Manager): @MEMOIZE_ID_MAPPING def _get_public_id(self, domain_id, local_id, entity_type): - return self.driver.get_public_id({'domain_id': domain_id, - 'local_id': local_id, - 'entity_type': entity_type}) + return self.driver.get_public_id( + { + 'domain_id': domain_id, + 'local_id': local_id, + 'entity_type': entity_type, + } + ) def get_public_id(self, local_entity): - return self._get_public_id(local_entity['domain_id'], - local_entity['local_id'], - local_entity['entity_type']) + return self._get_public_id( + local_entity['domain_id'], + local_entity['local_id'], + local_entity['entity_type'], + ) @MEMOIZE_ID_MAPPING def get_id_mapping(self, public_id): @@ -1641,10 +1811,13 @@ class MappingManager(manager.Manager): def create_id_mapping(self, local_entity, public_id=None): public_id = self.driver.create_id_mapping(local_entity, public_id) if MEMOIZE_ID_MAPPING.should_cache(public_id): - self._get_public_id.set(public_id, self, - local_entity['domain_id'], - local_entity['local_id'], - local_entity['entity_type']) + self._get_public_id.set( + public_id, + self, + local_entity['domain_id'], + local_entity['local_id'], + local_entity['entity_type'], + ) self.get_id_mapping.set(local_entity, self, public_id) return public_id @@ -1653,9 +1826,12 @@ class MappingManager(manager.Manager): self.driver.delete_id_mapping(public_id) # Delete the key of entity from cache if local_entity: - self._get_public_id.invalidate(self, local_entity['domain_id'], - local_entity['local_id'], - local_entity['entity_type']) + self._get_public_id.invalidate( + self, + local_entity['domain_id'], + local_entity['local_id'], + 
local_entity['entity_type'], + ) self.get_id_mapping.invalidate(self, public_id) def purge_mappings(self, purge_filter): diff --git a/keystone/identity/mapping_backends/base.py b/keystone/identity/mapping_backends/base.py index 1d7499b1b0..2fee949578 100644 --- a/keystone/identity/mapping_backends/base.py +++ b/keystone/identity/mapping_backends/base.py @@ -18,8 +18,9 @@ from keystone.common import provider_api from keystone import exception -class MappingDriverBase(provider_api.ProviderAPIMixin, object, - metaclass=abc.ABCMeta): +class MappingDriverBase( + provider_api.ProviderAPIMixin, object, metaclass=abc.ABCMeta +): """Interface description for an ID Mapping driver.""" @abc.abstractmethod diff --git a/keystone/identity/mapping_backends/sql.py b/keystone/identity/mapping_backends/sql.py index ea0e882226..65e1529f3c 100644 --- a/keystone/identity/mapping_backends/sql.py +++ b/keystone/identity/mapping_backends/sql.py @@ -24,14 +24,18 @@ class IDMapping(sql.ModelBase, sql.ModelDictMixin): local_id = sql.Column(sql.String(255), nullable=False) # NOTE(henry-nash): Postgres requires a name to be defined for an Enum entity_type = sql.Column( - sql.Enum(identity_mapping.EntityType.USER, - identity_mapping.EntityType.GROUP, - name='entity_type'), - nullable=False) + sql.Enum( + identity_mapping.EntityType.USER, + identity_mapping.EntityType.GROUP, + name='entity_type', + ), + nullable=False, + ) # Unique constraint to ensure you can't store more than one mapping to the # same underlying values __table_args__ = ( - sql.UniqueConstraint('domain_id', 'local_id', 'entity_type'),) + sql.UniqueConstraint('domain_id', 'local_id', 'entity_type'), + ) class Mapping(base.MappingDriverBase): @@ -74,7 +78,8 @@ class Mapping(base.MappingDriverBase): with sql.session_for_write() as session: if public_id is None: public_id = self.id_generator_api.generate_public_ID( - entity) + entity + ) entity['public_id'] = public_id mapping_ref = IDMapping.from_dict(entity) 
session.add(mapping_ref) @@ -87,7 +92,8 @@ class Mapping(base.MappingDriverBase): with sql.session_for_write() as session: try: session.query(IDMapping).filter( - IDMapping.public_id == public_id).delete() + IDMapping.public_id == public_id + ).delete() except sql.NotFound: # nosec # NOTE(morganfainberg): There is nothing to delete and nothing # to do. @@ -104,5 +110,6 @@ class Mapping(base.MappingDriverBase): query = query.filter_by(local_id=purge_filter['local_id']) if 'entity_type' in purge_filter: query = query.filter_by( - entity_type=purge_filter['entity_type']) + entity_type=purge_filter['entity_type'] + ) query.delete() diff --git a/keystone/identity/schema.py b/keystone/identity/schema.py index 0031578cbd..1c1d5f4b51 100644 --- a/keystone/identity/schema.py +++ b/keystone/identity/schema.py @@ -23,7 +23,7 @@ _identity_name = { 'type': 'string', 'minLength': 1, 'maxLength': 255, - 'pattern': r'[\S]+' + 'pattern': r'[\S]+', } # Schema for Identity v3 API @@ -35,33 +35,29 @@ _user_properties = { 'enabled': parameter_types.boolean, 'federated': { 'type': 'array', - 'items': - { - 'type': 'object', - 'properties': { - 'idp_id': {'type': 'string'}, - 'protocols': { - 'type': 'array', - 'items': - { - 'type': 'object', - 'properties': { - 'protocol_id': {'type': 'string'}, - 'unique_id': {'type': 'string'} - }, - 'required': ['protocol_id', 'unique_id'] - }, - 'minItems': 1 - } + 'items': { + 'type': 'object', + 'properties': { + 'idp_id': {'type': 'string'}, + 'protocols': { + 'type': 'array', + 'items': { + 'type': 'object', + 'properties': { + 'protocol_id': {'type': 'string'}, + 'unique_id': {'type': 'string'}, + }, + 'required': ['protocol_id', 'unique_id'], + }, + 'minItems': 1, }, - 'required': ['idp_id', 'protocols'] }, + 'required': ['idp_id', 'protocols'], + }, }, 'name': _identity_name, - 'password': { - 'type': ['string', 'null'] - }, - 'options': ro.USER_OPTIONS_REGISTRY.json_schema + 'password': {'type': ['string', 'null']}, + 'options': 
ro.USER_OPTIONS_REGISTRY.json_schema, } # TODO(notmorgan): Provide a mechanism for options to supply real jsonschema @@ -70,62 +66,56 @@ user_create = { 'type': 'object', 'properties': _user_properties, 'required': ['name'], - 'options': { - 'type': 'object' - }, - 'additionalProperties': True + 'options': {'type': 'object'}, + 'additionalProperties': True, } user_update = { 'type': 'object', 'properties': _user_properties, 'minProperties': 1, - 'options': { - 'type': 'object' - }, - 'additionalProperties': True + 'options': {'type': 'object'}, + 'additionalProperties': True, } _group_properties = { 'description': validation.nullable(parameter_types.description), 'domain_id': parameter_types.id_string, - 'name': _identity_name + 'name': _identity_name, } group_create = { 'type': 'object', 'properties': _group_properties, 'required': ['name'], - 'additionalProperties': True + 'additionalProperties': True, } group_update = { 'type': 'object', 'properties': _group_properties, 'minProperties': 1, - 'additionalProperties': True + 'additionalProperties': True, } _password_change_properties = { - 'original_password': { - 'type': 'string' - }, - 'password': { - 'type': 'string' - } + 'original_password': {'type': 'string'}, + 'password': {'type': 'string'}, } if getattr(CONF, 'strict_password_check', None): - _password_change_properties['password']['maxLength'] = \ - CONF.identity.max_password_length + _password_change_properties['password'][ + 'maxLength' + ] = CONF.identity.max_password_length if getattr(CONF, 'security_compliance', None): if getattr(CONF.security_compliance, 'password_regex', None): - _password_change_properties['password']['pattern'] = \ - CONF.security_compliance.password_regex + _password_change_properties['password'][ + 'pattern' + ] = CONF.security_compliance.password_regex password_change = { 'type': 'object', 'properties': _password_change_properties, 'required': ['original_password', 'password'], - 'additionalProperties': False + 
'additionalProperties': False, } diff --git a/keystone/identity/shadow_backends/base.py b/keystone/identity/shadow_backends/base.py index 134f86fec5..4106a242b3 100644 --- a/keystone/identity/shadow_backends/base.py +++ b/keystone/identity/shadow_backends/base.py @@ -34,15 +34,13 @@ def federated_objects_to_list(fed_ref): fed = {} for fed_dict in fed_ref: fed.setdefault( - fed_dict['idp_id'], + fed_dict['idp_id'], {'idp_id': fed_dict['idp_id'], 'protocols': []} + )['protocols'].append( { - 'idp_id': fed_dict['idp_id'], - 'protocols': [] + 'protocol_id': fed_dict['protocol_id'], + 'unique_id': fed_dict['unique_id'], } - )['protocols'].append({ - 'protocol_id': fed_dict['protocol_id'], - 'unique_id': fed_dict['unique_id'] - }) + ) return list(fed.values()) @@ -100,8 +98,9 @@ class ShadowUsersDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() @abc.abstractmethod - def update_federated_user_display_name(self, idp_id, protocol_id, - unique_id, display_name): + def update_federated_user_display_name( + self, idp_id, protocol_id, unique_id, display_name + ): """Update federated user's display name if changed. 
:param idp_id: The identity provider ID diff --git a/keystone/identity/shadow_backends/sql.py b/keystone/identity/shadow_backends/sql.py index a91902b1c7..039794dcae 100644 --- a/keystone/identity/shadow_backends/sql.py +++ b/keystone/identity/shadow_backends/sql.py @@ -33,17 +33,15 @@ class ShadowUsers(base.ShadowUsersDriverBase): @sql.handle_conflicts(conflict_type='federated_user') def create_federated_user(self, domain_id, federated_dict, email=None): - local_entity = {'domain_id': domain_id, - 'local_id': federated_dict['unique_id'], - 'entity_type': 'user'} + local_entity = { + 'domain_id': domain_id, + 'local_id': federated_dict['unique_id'], + 'entity_type': 'user', + } public_id = PROVIDERS.id_generator_api.generate_public_ID(local_entity) - user = { - 'id': public_id, - 'domain_id': domain_id, - 'enabled': True - } + user = {'id': public_id, 'domain_id': domain_id, 'enabled': True} if email: user['email'] = email with sql.session_for_write() as session: @@ -75,7 +73,8 @@ class ShadowUsers(base.ShadowUsersDriverBase): m = model.FederatedUser( idp_id=row.idp_id, protocol_id=row.protocol_id, - unique_id=row.unique_id) + unique_id=row.unique_id, + ) fed_ref.append(m.to_dict()) return base.federated_objects_to_list(fed_ref) @@ -84,28 +83,35 @@ class ShadowUsers(base.ShadowUsersDriverBase): for filter_ in hints.filters: if filter_['name'] == 'idp_id': statements.append( - model.FederatedUser.idp_id == filter_['value']) + model.FederatedUser.idp_id == filter_['value'] + ) if filter_['name'] == 'protocol_id': statements.append( - model.FederatedUser.protocol_id == filter_['value']) + model.FederatedUser.protocol_id == filter_['value'] + ) if filter_['name'] == 'unique_id': statements.append( - model.FederatedUser.unique_id == filter_['value']) + model.FederatedUser.unique_id == filter_['value'] + ) # Remove federated attributes to prevent redundancies from # sql.filter_limit_query which filters remaining hints hints.filters = [ - x for x in hints.filters if 
x['name'] not in ('idp_id', - 'protocol_id', - 'unique_id')] + x + for x in hints.filters + if x['name'] not in ('idp_id', 'protocol_id', 'unique_id') + ] if statements: query = query.filter(sqlalchemy.and_(*statements)) return query def get_federated_users(self, hints): with sql.session_for_read() as session: - query = session.query(model.User).outerjoin( - model.LocalUser).outerjoin(model.FederatedUser) + query = ( + session.query(model.User) + .outerjoin(model.LocalUser) + .outerjoin(model.FederatedUser) + ) query = query.filter(model.User.id == model.FederatedUser.user_id) query = self._update_query_with_federated_statements(hints, query) name_filter = None @@ -113,8 +119,9 @@ class ShadowUsers(base.ShadowUsersDriverBase): if filter_['name'] == 'name': name_filter = filter_ query = query.filter( - model.FederatedUser.display_name == name_filter[ - 'value']) + model.FederatedUser.display_name + == name_filter['value'] + ) break if name_filter: hints.filters.remove(name_filter) @@ -145,8 +152,9 @@ class ShadowUsers(base.ShadowUsersDriverBase): query = session.query(model.User).outerjoin(model.LocalUser) query = query.join(model.FederatedUser) query = query.filter(model.FederatedUser.idp_id == idp_id) - query = query.filter(model.FederatedUser.protocol_id == - protocol_id) + query = query.filter( + model.FederatedUser.protocol_id == protocol_id + ) query = query.filter(model.FederatedUser.unique_id == unique_id) try: user_ref = query.one() @@ -162,16 +170,19 @@ class ShadowUsers(base.ShadowUsersDriverBase): user_ref.last_active_at = datetime.datetime.utcnow().date() @sql.handle_conflicts(conflict_type='federated_user') - def update_federated_user_display_name(self, idp_id, protocol_id, - unique_id, display_name): + def update_federated_user_display_name( + self, idp_id, protocol_id, unique_id, display_name + ): with sql.session_for_write() as session: query = session.query(model.FederatedUser) query = query.filter(model.FederatedUser.idp_id == idp_id) - query = 
query.filter(model.FederatedUser.protocol_id == - protocol_id) + query = query.filter( + model.FederatedUser.protocol_id == protocol_id + ) query = query.filter(model.FederatedUser.unique_id == unique_id) - query = query.filter(model.FederatedUser.display_name != - display_name) + query = query.filter( + model.FederatedUser.display_name != display_name + ) query.update({'display_name': display_name}) return @@ -182,12 +193,11 @@ class ShadowUsers(base.ShadowUsersDriverBase): new_user_dict.pop('name', None) new_user_dict.pop('password', None) # create nonlocal_user dict - new_nonlocal_user_dict = { - 'name': user_dict['name'] - } + new_nonlocal_user_dict = {'name': user_dict['name']} with sql.session_for_write() as session: new_nonlocal_user_ref = model.NonLocalUser.from_dict( - new_nonlocal_user_dict) + new_nonlocal_user_dict + ) new_user_ref = model.User.from_dict(new_user_dict) new_user_ref.created_at = datetime.datetime.utcnow() new_user_ref.nonlocal_user = new_nonlocal_user_ref @@ -219,8 +229,9 @@ class ShadowUsers(base.ShadowUsersDriverBase): def list_federated_users_info(self, hints=None): with sql.session_for_read() as session: query = session.query(model.FederatedUser) - fed_user_refs = sql.filter_limit_query(model.FederatedUser, query, - hints) + fed_user_refs = sql.filter_limit_query( + model.FederatedUser, query, hints + ) return [x.to_dict() for x in fed_user_refs] def add_user_to_group_expires(self, user_id, group_id): @@ -245,9 +256,11 @@ class ShadowUsers(base.ShadowUsersDriverBase): if membership: membership.last_verified = datetime.datetime.utcnow() else: - session.add(model.ExpiringUserGroupMembership( - user_id=user_id, - group_id=group_id, - idp_id=user.idp_id, - last_verified=datetime.datetime.utcnow() - )) + session.add( + model.ExpiringUserGroupMembership( + user_id=user_id, + group_id=group_id, + idp_id=user.idp_id, + last_verified=datetime.datetime.utcnow(), + ) + ) diff --git a/keystone/limit/backends/sql.py 
b/keystone/limit/backends/sql.py index b2ce6c13b6..254907c54a 100644 --- a/keystone/limit/backends/sql.py +++ b/keystone/limit/backends/sql.py @@ -35,15 +35,15 @@ class RegisteredLimitModel(sql.ModelBase, sql.ModelDictMixin): 'region_id', 'resource_name', 'default_limit', - 'description' + 'description', ] internal_id = sql.Column(sql.Integer, primary_key=True, nullable=False) id = sql.Column(sql.String(length=64), nullable=False, unique=True) - service_id = sql.Column(sql.String(255), - sql.ForeignKey('service.id')) - region_id = sql.Column(sql.String(64), - sql.ForeignKey('region.id'), nullable=True) + service_id = sql.Column(sql.String(255), sql.ForeignKey('service.id')) + region_id = sql.Column( + sql.String(64), sql.ForeignKey('region.id'), nullable=True + ) resource_name = sql.Column(sql.String(255)) default_limit = sql.Column(sql.Integer, nullable=False) description = sql.Column(sql.Text()) @@ -66,7 +66,7 @@ class LimitModel(sql.ModelBase, sql.ModelDictMixin): 'resource_name', 'resource_limit', 'description', - 'registered_limit_id' + 'registered_limit_id', ] internal_id = sql.Column(sql.Integer, primary_key=True, nullable=False) @@ -75,8 +75,9 @@ class LimitModel(sql.ModelBase, sql.ModelDictMixin): domain_id = sql.Column(sql.String(64)) resource_limit = sql.Column(sql.Integer, nullable=False) description = sql.Column(sql.Text()) - registered_limit_id = sql.Column(sql.String(64), - sql.ForeignKey('registered_limit.id')) + registered_limit_id = sql.Column( + sql.String(64), sql.ForeignKey('registered_limit.id') + ) registered_limit = sqlalchemy.orm.relationship('RegisteredLimitModel') @@ -123,8 +124,9 @@ class LimitModel(sql.ModelBase, sql.ModelDictMixin): class UnifiedLimit(base.UnifiedLimitDriverBase): - def _check_unified_limit_unique(self, unified_limit, - is_registered_limit=True): + def _check_unified_limit_unique( + self, unified_limit, is_registered_limit=True + ): # Ensure the new created or updated unified limit won't break the # current reference 
between registered limit and limit. i.e. We should # ensure that there is no duplicate entry. @@ -135,22 +137,25 @@ class UnifiedLimit(base.UnifiedLimitDriverBase): hints.add_filter('region_id', unified_limit.get('region_id')) with sql.session_for_read() as session: query = session.query(RegisteredLimitModel) - unified_limits = sql.filter_limit_query(RegisteredLimitModel, - query, - hints).all() + unified_limits = sql.filter_limit_query( + RegisteredLimitModel, query, hints + ).all() else: - hints.add_filter('registered_limit_id', - unified_limit['registered_limit_id']) - is_project_limit = (True if unified_limit.get('project_id') - else False) + hints.add_filter( + 'registered_limit_id', unified_limit['registered_limit_id'] + ) + is_project_limit = ( + True if unified_limit.get('project_id') else False + ) if is_project_limit: hints.add_filter('project_id', unified_limit['project_id']) else: hints.add_filter('domain_id', unified_limit['domain_id']) with sql.session_for_read() as session: query = session.query(LimitModel) - unified_limits = sql.filter_limit_query(LimitModel, query, - hints).all() + unified_limits = sql.filter_limit_query( + LimitModel, query, hints + ).all() if unified_limits: msg = _('Duplicate entry') @@ -162,7 +167,8 @@ class UnifiedLimit(base.UnifiedLimitDriverBase): # is no reference limit. 
with sql.session_for_read() as session: limits = session.query(LimitModel).filter_by( - registered_limit_id=registered_limit['id']) + registered_limit_id=registered_limit['id'] + ) if limits.all(): raise exception.RegisteredLimitError(id=registered_limit.id) @@ -185,16 +191,16 @@ class UnifiedLimit(base.UnifiedLimitDriverBase): self._check_referenced_limit_reference(ref) old_dict = ref.to_dict() old_dict.update(registered_limit) - if (registered_limit.get('service_id') or - 'region_id' in registered_limit or - registered_limit.get('resource_name')): + if ( + registered_limit.get('service_id') + or 'region_id' in registered_limit + or registered_limit.get('resource_name') + ): self._check_unified_limit_unique(old_dict) - new_registered_limit = RegisteredLimitModel.from_dict( - old_dict) + new_registered_limit = RegisteredLimitModel.from_dict(old_dict) for attr in registered_limit: if attr != 'id': - setattr(ref, attr, getattr(new_registered_limit, - attr)) + setattr(ref, attr, getattr(new_registered_limit, attr)) return ref.to_dict() except db_exception.DBReferenceError: raise exception.RegisteredLimitError(id=registered_limit_id) @@ -203,14 +209,15 @@ class UnifiedLimit(base.UnifiedLimitDriverBase): def list_registered_limits(self, hints): with sql.session_for_read() as session: registered_limits = session.query(RegisteredLimitModel) - registered_limits = sql.filter_limit_query(RegisteredLimitModel, - registered_limits, - hints) + registered_limits = sql.filter_limit_query( + RegisteredLimitModel, registered_limits, hints + ) return [s.to_dict() for s in registered_limits] def _get_registered_limit(self, session, registered_limit_id): query = session.query(RegisteredLimitModel).filter_by( - id=registered_limit_id) + id=registered_limit_id + ) ref = query.first() if ref is None: raise exception.RegisteredLimitNotFound(id=registered_limit_id) @@ -219,13 +226,13 @@ class UnifiedLimit(base.UnifiedLimitDriverBase): def get_registered_limit(self, registered_limit_id): 
with sql.session_for_read() as session: return self._get_registered_limit( - session, registered_limit_id).to_dict() + session, registered_limit_id + ).to_dict() def delete_registered_limit(self, registered_limit_id): try: with sql.session_for_write() as session: - ref = self._get_registered_limit(session, - registered_limit_id) + ref = self._get_registered_limit(session, registered_limit_id) self._check_referenced_limit_reference(ref) session.delete(ref) except db_exception.DBReferenceError: @@ -243,7 +250,8 @@ class UnifiedLimit(base.UnifiedLimitDriverBase): with sql.session_for_read() as session: registered_limits = session.query(RegisteredLimitModel) registered_limits = sql.filter_limit_query( - RegisteredLimitModel, registered_limits, hints) + RegisteredLimitModel, registered_limits, hints + ) reg_limits = registered_limits.all() if not reg_limits: raise exception.NoLimitReference @@ -258,8 +266,9 @@ class UnifiedLimit(base.UnifiedLimitDriverBase): new_limits = [] for limit in limits: target = self._check_and_fill_registered_limit_id(limit) - self._check_unified_limit_unique(target, - is_registered_limit=False) + self._check_unified_limit_unique( + target, is_registered_limit=False + ) ref = LimitModel.from_dict(target) session.add(ref) new_limit = ref.to_dict() @@ -297,13 +306,11 @@ class UnifiedLimit(base.UnifiedLimitDriverBase): def get_limit(self, limit_id): with sql.session_for_read() as session: - return self._get_limit(session, - limit_id).to_dict() + return self._get_limit(session, limit_id).to_dict() def delete_limit(self, limit_id): with sql.session_for_write() as session: - ref = self._get_limit(session, - limit_id) + ref = self._get_limit(session, limit_id) session.delete(ref) def delete_limits_for_project(self, project_id): diff --git a/keystone/limit/core.py b/keystone/limit/core.py index 4e79bcc09e..1b83e335c5 100644 --- a/keystone/limit/core.py +++ b/keystone/limit/core.py @@ -37,12 +37,14 @@ class Manager(manager.Manager): super(Manager, 
self).__init__(unified_limit_driver) self.enforcement_model = base.load_driver( - CONF.unified_limit.enforcement_model) + CONF.unified_limit.enforcement_model + ) def check_project_depth(self): """Check if project depth satisfies current enforcement model.""" PROVIDERS.resource_api.check_project_depth( - self.enforcement_model.MAX_PROJECT_TREE_DEPTH) + self.enforcement_model.MAX_PROJECT_TREE_DEPTH + ) def _assert_resource_exist(self, unified_limit, target): try: @@ -58,29 +60,34 @@ class Manager(manager.Manager): if project['is_domain']: # Treat the input limit as domain level limit. unified_limit['domain_id'] = unified_limit.pop( - 'project_id') + 'project_id' + ) domain_id = unified_limit.get('domain_id') if domain_id is not None: PROVIDERS.resource_api.get_domain(domain_id) except exception.ServiceNotFound: - raise exception.ValidationError(attribute='service_id', - target=target) + raise exception.ValidationError( + attribute='service_id', target=target + ) except exception.RegionNotFound: - raise exception.ValidationError(attribute='region_id', - target=target) + raise exception.ValidationError( + attribute='region_id', target=target + ) except exception.ProjectNotFound: - raise exception.ValidationError(attribute='project_id', - target=target) + raise exception.ValidationError( + attribute='project_id', target=target + ) except exception.DomainNotFound: - raise exception.ValidationError(attribute='domain_id', - target=target) + raise exception.ValidationError( + attribute='domain_id', target=target + ) def get_model(self): """Return information of the configured enforcement model.""" return { 'name': self.enforcement_model.NAME, - 'description': self.enforcement_model.DESCRIPTION + 'description': self.enforcement_model.DESCRIPTION, } def create_registered_limits(self, registered_limits): @@ -91,15 +98,18 @@ class Manager(manager.Manager): def update_registered_limit(self, registered_limit_id, registered_limit): self._assert_resource_exist(registered_limit, 
'registered_limit') updated_registered_limit = self.driver.update_registered_limit( - registered_limit_id, registered_limit) - self.get_registered_limit.invalidate(self, - updated_registered_limit['id']) + registered_limit_id, registered_limit + ) + self.get_registered_limit.invalidate( + self, updated_registered_limit['id'] + ) return updated_registered_limit @manager.response_truncated def list_registered_limits(self, hints=None): return self.driver.list_registered_limits( - hints or driver_hints.Hints()) + hints or driver_hints.Hints() + ) @MEMOIZE def get_registered_limit(self, registered_limit_id): diff --git a/keystone/limit/models/base.py b/keystone/limit/models/base.py index 281d08b5eb..12e79734a8 100644 --- a/keystone/limit/models/base.py +++ b/keystone/limit/models/base.py @@ -22,13 +22,12 @@ CONF = keystone.conf.CONF def load_driver(driver_name, *args): namespace = 'keystone.unified_limit.model' try: - driver_manager = stevedore.DriverManager(namespace, - driver_name, - invoke_on_load=True, - invoke_args=args) + driver_manager = stevedore.DriverManager( + namespace, driver_name, invoke_on_load=True, invoke_args=args + ) return driver_manager.driver except stevedore.exception.NoMatches: - msg = (_('Unable to find %(name)r driver in %(namespace)r.')) + msg = _('Unable to find %(name)r driver in %(namespace)r.') raise ImportError(msg % {'name': driver_name, 'namespace': namespace}) diff --git a/keystone/limit/models/strict_two_level.py b/keystone/limit/models/strict_two_level.py index c174eb50bb..adff9b5468 100644 --- a/keystone/limit/models/strict_two_level.py +++ b/keystone/limit/models/strict_two_level.py @@ -30,8 +30,14 @@ class StrictTwoLevelModel(base.ModelBase): ) MAX_PROJECT_TREE_DEPTH = 2 - def _get_specified_limit_value(self, resource_name, service_id, - region_id, project_id=None, domain_id=None): + def _get_specified_limit_value( + self, + resource_name, + service_id, + region_id, + project_id=None, + domain_id=None, + ): """Get the specified 
limit value. Try to give the resource limit first. If the specified limit is a @@ -58,8 +64,15 @@ class StrictTwoLevelModel(base.ModelBase): limit_value = limits[0]['default_limit'] if limits else None return limit_value - def _check_limit(self, resource_name, service_id, region_id, - resource_limit, domain_id=None, parent_id=None): + def _check_limit( + self, + resource_name, + service_id, + region_id, + resource_limit, + domain_id=None, + parent_id=None, + ): """Check the specified limit value satisfies the related project tree. 1. Ensure the limit is smaller than its parent. @@ -70,22 +83,29 @@ class StrictTwoLevelModel(base.ModelBase): # This is a project limit, need make sure its limit is not bigger # than its parent. parent_limit_value = self._get_specified_limit_value( - resource_name, service_id, region_id, domain_id=parent_id) + resource_name, service_id, region_id, domain_id=parent_id + ) if parent_limit_value and resource_limit > parent_limit_value: raise exception.InvalidLimit( - reason="Limit is bigger than parent.") + reason="Limit is bigger than parent." + ) else: # This is a domain limit, need make sure its limit is not smaller # than its children. sub_projects = PROVIDERS.resource_api.list_projects_in_subtree( - domain_id) + domain_id + ) for sub_project in sub_projects: sub_limit_value = self._get_specified_limit_value( - resource_name, service_id, region_id, - project_id=sub_project['id']) + resource_name, + service_id, + region_id, + project_id=sub_project['id'], + ) if sub_limit_value and resource_limit < sub_limit_value: raise exception.InvalidLimit( - reason="Limit is smaller than child.") + reason="Limit is smaller than child." + ) def check_limit(self, limits): """Check the input limits satisfy the related project tree or not. @@ -106,20 +126,29 @@ class StrictTwoLevelModel(base.ModelBase): # limit is project level, its parent must be a domain. 
if project_id: parent_id = PROVIDERS.resource_api.get_project(project_id)[ - 'parent_id'] - parent_limit = list(filter( - lambda x: (x.get('domain_id') == parent_id and - x['service_id'] == service_id and - x.get('region_id') == region_id and - x['resource_name'] == resource_name), - limits)) + 'parent_id' + ] + parent_limit = list( + filter( + lambda x: ( + x.get('domain_id') == parent_id + and x['service_id'] == service_id + and x.get('region_id') == region_id + and x['resource_name'] == resource_name + ), + limits, + ) + ) if parent_limit: if resource_limit > parent_limit[0]['resource_limit']: - error = _("The value of the limit which project is" - " %(project_id)s should not bigger than " - "its parent domain %(domain_id)s.") % { + error = _( + "The value of the limit which project is" + " %(project_id)s should not bigger than " + "its parent domain %(domain_id)s." + ) % { "project_id": project_id, - "domain_id": parent_limit[0]['domain_id']} + "domain_id": parent_limit[0]['domain_id'], + } raise exception.InvalidLimit(reason=error) # The limit's parent is in request body, no need to # check the backend limit any more. 
@@ -127,35 +156,44 @@ class StrictTwoLevelModel(base.ModelBase): else: parent_id = None - self._check_limit(resource_name, service_id, region_id, - resource_limit, domain_id=domain_id, - parent_id=parent_id) + self._check_limit( + resource_name, + service_id, + region_id, + resource_limit, + domain_id=domain_id, + parent_id=parent_id, + ) except exception.InvalidLimit: - error = ("The resource limit (%(level)s: %(id)s, " - "resource_name: %(resource_name)s, " - "resource_limit: %(resource_limit)s, " - "service_id: %(service_id)s, " - "region_id: %(region_id)s) doesn't satisfy " - "current hierarchy model.") % { + error = ( + "The resource limit (%(level)s: %(id)s, " + "resource_name: %(resource_name)s, " + "resource_limit: %(resource_limit)s, " + "service_id: %(service_id)s, " + "region_id: %(region_id)s) doesn't satisfy " + "current hierarchy model." + ) % { 'level': 'project_id' if project_id else 'domain_id', 'id': project_id or domain_id, 'resource_name': resource_name, 'resource_limit': resource_limit, 'service_id': service_id, - 'region_id': region_id + 'region_id': region_id, } - tr_error = _("The resource limit (%(level)s: %(id)s, " - "resource_name: %(resource_name)s, " - "resource_limit: %(resource_limit)s, " - "service_id: %(service_id)s, " - "region_id: %(region_id)s) doesn't satisfy " - "current hierarchy model.") % { + tr_error = _( + "The resource limit (%(level)s: %(id)s, " + "resource_name: %(resource_name)s, " + "resource_limit: %(resource_limit)s, " + "service_id: %(service_id)s, " + "region_id: %(region_id)s) doesn't satisfy " + "current hierarchy model." 
+ ) % { 'level': 'project_id' if project_id else 'domain_id', 'id': project_id or domain_id, 'resource_name': resource_name, 'resource_limit': resource_limit, 'service_id': service_id, - 'region_id': region_id + 'region_id': region_id, } LOG.error(error) raise exception.InvalidLimit(reason=tr_error) diff --git a/keystone/limit/schema.py b/keystone/limit/schema.py index 56b2337df9..60aeab749b 100644 --- a/keystone/limit/schema.py +++ b/keystone/limit/schema.py @@ -17,33 +17,27 @@ from keystone.common.validation import parameter_types _registered_limit_properties = { 'service_id': parameter_types.id_string, - 'region_id': { - 'type': ['null', 'string'] - }, - 'resource_name': { - 'type': 'string', - 'minLength': 1, - 'maxLength': 255 - }, + 'region_id': {'type': ['null', 'string']}, + 'resource_name': {'type': 'string', 'minLength': 1, 'maxLength': 255}, 'default_limit': { 'type': 'integer', 'minimum': -1, - 'maximum': 0x7FFFFFFF # The maximum value a signed INT may have + 'maximum': 0x7FFFFFFF, # The maximum value a signed INT may have }, - 'description': validation.nullable(parameter_types.description) + 'description': validation.nullable(parameter_types.description), } _registered_limit_create = { 'type': 'object', 'properties': _registered_limit_properties, 'additionalProperties': False, - 'required': ['service_id', 'resource_name', 'default_limit'] + 'required': ['service_id', 'resource_name', 'default_limit'], } registered_limit_create = { 'type': 'array', 'items': _registered_limit_create, - 'minItems': 1 + 'minItems': 1, } registered_limit_update = { 'type': 'object', @@ -54,74 +48,68 @@ registered_limit_update = { _project_limit_create_properties = { 'project_id': parameter_types.id_string, 'service_id': parameter_types.id_string, - 'region_id': { - 'type': 'string' - }, - 'resource_name': { - 'type': 'string', - 'minLength': 1, - 'maxLength': 255 - }, + 'region_id': {'type': 'string'}, + 'resource_name': {'type': 'string', 'minLength': 1, 'maxLength': 255}, 
'resource_limit': { 'type': 'integer', 'minimum': -1, - 'maximum': 0x7FFFFFFF # The maximum value a signed INT may have + 'maximum': 0x7FFFFFFF, # The maximum value a signed INT may have }, - 'description': validation.nullable(parameter_types.description) + 'description': validation.nullable(parameter_types.description), } _domain_limit_create_properties = { 'domain_id': parameter_types.id_string, 'service_id': parameter_types.id_string, - 'region_id': { - 'type': 'string' - }, - 'resource_name': { - 'type': 'string', - 'minLength': 1, - 'maxLength': 255 - }, + 'region_id': {'type': 'string'}, + 'resource_name': {'type': 'string', 'minLength': 1, 'maxLength': 255}, 'resource_limit': { 'type': 'integer', 'minimum': -1, - 'maximum': 0x7FFFFFFF # The maximum value a signed INT may have + 'maximum': 0x7FFFFFFF, # The maximum value a signed INT may have }, - 'description': validation.nullable(parameter_types.description) + 'description': validation.nullable(parameter_types.description), } _limit_create = { 'type': 'object', 'oneOf': [ - {'properties': _project_limit_create_properties, - 'required': ['project_id', 'service_id', 'resource_name', - 'resource_limit'], - 'additionalProperties': False, - }, - {'properties': _domain_limit_create_properties, - 'required': ['domain_id', 'service_id', 'resource_name', - 'resource_limit'], - 'additionalProperties': False, - }, - ] + { + 'properties': _project_limit_create_properties, + 'required': [ + 'project_id', + 'service_id', + 'resource_name', + 'resource_limit', + ], + 'additionalProperties': False, + }, + { + 'properties': _domain_limit_create_properties, + 'required': [ + 'domain_id', + 'service_id', + 'resource_name', + 'resource_limit', + ], + 'additionalProperties': False, + }, + ], } -limit_create = { - 'type': 'array', - 'items': _limit_create, - 'minItems': 1 -} +limit_create = {'type': 'array', 'items': _limit_create, 'minItems': 1} _limit_update_properties = { 'resource_limit': { 'type': 'integer', 'minimum': -1, 
- 'maximum': 0x7FFFFFFF # The maximum value a signed INT may have + 'maximum': 0x7FFFFFFF, # The maximum value a signed INT may have }, - 'description': validation.nullable(parameter_types.description) + 'description': validation.nullable(parameter_types.description), } limit_update = { 'type': 'object', 'properties': _limit_update_properties, - 'additionalProperties': False + 'additionalProperties': False, } diff --git a/keystone/models/receipt_model.py b/keystone/models/receipt_model.py index bb015b1727..3d7eddb83b 100644 --- a/keystone/models/receipt_model.py +++ b/keystone/models/receipt_model.py @@ -46,7 +46,7 @@ class ReceiptModel(object): def __repr__(self): """Return string representation of KeystoneReceipt.""" - desc = ('<%(type)s at %(loc)s>') + desc = '<%(type)s at %(loc)s>' self_cls_name = reflection.get_class_name(self, fully_qualified=False) return desc % {'type': self_cls_name, 'loc': hex(id(self))} @@ -90,9 +90,11 @@ class ReceiptModel(object): def required_methods(self): if not self.__required_methods: mfa_rules = self.user['options'].get( - ro.MFA_RULES_OPT.option_name, []) + ro.MFA_RULES_OPT.option_name, [] + ) rules = core.UserMFARulesValidator._parse_rule_structure( - mfa_rules, self.user_id) + mfa_rules, self.user_id + ) methods = set(self.methods) active_methods = set(core.AUTH_METHODS.keys()) diff --git a/keystone/models/revoke_model.py b/keystone/models/revoke_model.py index c36d4fdc30..daea2a8c27 100644 --- a/keystone/models/revoke_model.py +++ b/keystone/models/revoke_model.py @@ -22,16 +22,18 @@ LOG = log.getLogger(__name__) # The set of attributes common between the RevokeEvent # and the dictionaries created from the token Data. 
-_NAMES = ['trust_id', - 'consumer_id', - 'access_token_id', - 'audit_id', - 'audit_chain_id', - 'expires_at', - 'domain_id', - 'project_id', - 'user_id', - 'role_id'] +_NAMES = [ + 'trust_id', + 'consumer_id', + 'access_token_id', + 'audit_id', + 'audit_chain_id', + 'expires_at', + 'domain_id', + 'project_id', + 'user_id', + 'role_id', +] # Additional arguments for creating a RevokeEvent @@ -44,11 +46,13 @@ _EVENT_NAMES = _NAMES + ['domain_scope_id'] # Values that will be in the token data but not in the event. # These will compared with event values that have different names. # For example: both trustor_id and trustee_id are compared against user_id -_TOKEN_KEYS = ['identity_domain_id', - 'assignment_domain_id', - 'issued_at', - 'trustor_id', - 'trustee_id'] +_TOKEN_KEYS = [ + 'identity_domain_id', + 'assignment_domain_id', + 'issued_at', + 'trustor_id', + 'trustee_id', +] # Alternative names to be checked in token for every field in # revoke tree. @@ -56,7 +60,9 @@ ALTERNATIVES = { 'user_id': ['user_id', 'trustor_id', 'trustee_id'], 'domain_id': ['identity_domain_id', 'assignment_domain_id'], # For a domain-scoped token, the domain is in assignment_domain_id. 
- 'domain_scope_id': ['assignment_domain_id', ], + 'domain_scope_id': [ + 'assignment_domain_id', + ], } @@ -99,16 +105,20 @@ class RevokeEvent(object): self.issued_before = self.revoked_at def to_dict(self): - keys = ['user_id', - 'role_id', - 'domain_id', - 'domain_scope_id', - 'project_id', - 'audit_id', - 'audit_chain_id', - ] - event = {key: self.__dict__[key] for key in keys - if self.__dict__[key] is not None} + keys = [ + 'user_id', + 'role_id', + 'domain_id', + 'domain_scope_id', + 'project_id', + 'audit_id', + 'audit_chain_id', + ] + event = { + key: self.__dict__[key] + for key in keys + if self.__dict__[key] is not None + } if self.trust_id is not None: event['OS-TRUST:trust_id'] = self.trust_id if self.consumer_id is not None: @@ -118,11 +128,13 @@ class RevokeEvent(object): if self.expires_at is not None: event['expires_at'] = utils.isotime(self.expires_at) if self.issued_before is not None: - event['issued_before'] = utils.isotime(self.issued_before, - subsecond=True) + event['issued_before'] = utils.isotime( + self.issued_before, subsecond=True + ) if self.revoked_at is not None: - event['revoked_at'] = utils.isotime(self.revoked_at, - subsecond=True) + event['revoked_at'] = utils.isotime( + self.revoked_at, subsecond=True + ) return event @@ -171,34 +183,41 @@ def matches(event, token_values): # The token has two attributes that can match the domain_id. if event.domain_id is not None and event.domain_id not in ( - token_values['identity_domain_id'], - token_values['assignment_domain_id'],): + token_values['identity_domain_id'], + token_values['assignment_domain_id'], + ): return False if event.domain_scope_id is not None and event.domain_scope_id not in ( - token_values['assignment_domain_id'],): + token_values['assignment_domain_id'], + ): return False # If an event specifies an attribute name, but it does not match, the token # is not revoked. 
if event.expires_at is not None and event.expires_at not in ( - token_values['expires_at'],): + token_values['expires_at'], + ): return False if event.trust_id is not None and event.trust_id not in ( - token_values['trust_id'],): + token_values['trust_id'], + ): return False if event.consumer_id is not None and event.consumer_id not in ( - token_values['consumer_id'],): + token_values['consumer_id'], + ): return False if event.audit_chain_id is not None and event.audit_chain_id not in ( - token_values['audit_chain_id'],): + token_values['audit_chain_id'], + ): return False if event.role_id is not None and event.role_id not in ( - token_values['roles']): + token_values['roles'] + ): return False return True @@ -215,7 +234,8 @@ def build_token_values(token): token_values = { 'expires_at': timeutils.normalize_time(token_expires_at), 'issued_at': timeutils.normalize_time( - timeutils.parse_isotime(token.issued_at)), + timeutils.parse_isotime(token.issued_at) + ), 'audit_id': token.audit_id, 'audit_chain_id': token.parent_audit_id, } @@ -285,8 +305,10 @@ class _RevokeEventHandler(object): try: revoke_event = RevokeEvent(**revoke_event_data) except Exception: - LOG.debug("Failed to deserialize RevokeEvent. Data is %s", - revoke_event_data) + LOG.debug( + "Failed to deserialize RevokeEvent. 
Data is %s", + revoke_event_data, + ) raise return revoke_event diff --git a/keystone/models/token_model.py b/keystone/models/token_model.py index 78146295d2..0f48eb9d54 100644 --- a/keystone/models/token_model.py +++ b/keystone/models/token_model.py @@ -84,13 +84,17 @@ class TokenModel(object): def __repr__(self): """Return string representation of TokenModel.""" - desc = ('<%(type)s (audit_id=%(audit_id)s, ' - 'audit_chain_id=%(audit_ids)s) at %(loc)s>') + desc = ( + '<%(type)s (audit_id=%(audit_id)s, ' + 'audit_chain_id=%(audit_ids)s) at %(loc)s>' + ) self_cls_name = reflection.get_class_name(self, fully_qualified=False) - return desc % {'type': self_cls_name, - 'audit_id': self.audit_id, - 'audit_ids': self.audit_ids, - 'loc': hex(id(self))} + return desc % { + 'type': self_cls_name, + 'audit_id': self.audit_id, + 'audit_ids': self.audit_ids, + 'loc': hex(id(self)), + } @property def audit_ids(self): @@ -121,8 +125,12 @@ class TokenModel(object): @property def unscoped(self): return not any( - [self.system_scoped, self.domain_scoped, self.project_scoped, - self.trust_scoped] + [ + self.system_scoped, + self.domain_scoped, + self.project_scoped, + self.trust_scoped, + ] ) @property @@ -297,21 +305,17 @@ class TokenModel(object): # because the user ID of the original trustor helps us determine scope # in the redelegated context. 
if self.trust.get('redelegated_trust_id'): - trust_chain = PROVIDERS.trust_api.get_trust_pedigree( - self.trust_id - ) + trust_chain = PROVIDERS.trust_api.get_trust_pedigree(self.trust_id) original_trustor_id = trust_chain[-1]['trustor_user_id'] else: original_trustor_id = self.trustor['id'] - trust_roles = [ - {'role_id': role['id']} for role in self.trust['roles'] - ] - effective_trust_roles = ( - PROVIDERS.assignment_api.add_implied_roles(trust_roles) + trust_roles = [{'role_id': role['id']} for role in self.trust['roles']] + effective_trust_roles = PROVIDERS.assignment_api.add_implied_roles( + trust_roles ) - effective_trust_role_ids = ( - set([r['role_id'] for r in effective_trust_roles]) + effective_trust_role_ids = set( + [r['role_id'] for r in effective_trust_roles] ) current_effective_trustor_roles = ( @@ -326,8 +330,7 @@ class TokenModel(object): if role['domain_id'] is None: roles.append(role) else: - raise exception.Forbidden( - _('Trustee has no delegated roles.')) + raise exception.Forbidden(_('Trustee has no delegated roles.')) return roles @@ -335,7 +338,8 @@ class TokenModel(object): roles = [] access_token_roles = self.access_token['role_ids'] access_token_roles = [ - {'role_id': r} for r in jsonutils.loads(access_token_roles)] + {'role_id': r} for r in jsonutils.loads(access_token_roles) + ] effective_access_token_roles = ( PROVIDERS.assignment_api.add_implied_roles(access_token_roles) ) @@ -354,9 +358,7 @@ class TokenModel(object): ) for group_id in group_ids: group_roles = ( - PROVIDERS.assignment_api.list_system_grants_for_group( - group_id - ) + PROVIDERS.assignment_api.list_system_grants_for_group(group_id) ) for role in group_roles: federated_roles.append(role) @@ -403,10 +405,8 @@ class TokenModel(object): def _get_domain_roles(self): roles = [] - domain_roles = ( - PROVIDERS.assignment_api.get_roles_for_user_and_domain( - self.user_id, self.domain_id - ) + domain_roles = PROVIDERS.assignment_api.get_roles_for_user_and_domain( + 
self.user_id, self.domain_id ) for role_id in domain_roles: role = PROVIDERS.role_api.get_role(role_id) @@ -434,7 +434,8 @@ class TokenModel(object): user_id=self.user_id, project_id=self.project_id, domain_id=self.domain_id, - effective=True) + effective=True, + ) user_roles = list(set([x['role_id'] for x in assignment_list])) for role in app_cred_roles: @@ -468,17 +469,23 @@ class TokenModel(object): def _validate_token_resources(self): if self.project and not self.project.get('enabled'): - msg = ('Unable to validate token because project %(id)s is ' - 'disabled') % {'id': self.project_id} - tr_msg = _('Unable to validate token because project %(id)s is ' - 'disabled') % {'id': self.project_id} + msg = ( + 'Unable to validate token because project %(id)s is ' + 'disabled' + ) % {'id': self.project_id} + tr_msg = _( + 'Unable to validate token because project %(id)s is ' + 'disabled' + ) % {'id': self.project_id} LOG.warning(msg) raise exception.ProjectNotFound(tr_msg) if self.project and not self.project_domain.get('enabled'): - msg = ('Unable to validate token because domain %(id)s is ' - 'disabled') % {'id': self.project_domain['id']} - tr_msg = _('Unable to validate token because domain %(id)s is ' - 'disabled') % {'id': self.project_domain['id']} + msg = ( + 'Unable to validate token because domain %(id)s is ' 'disabled' + ) % {'id': self.project_domain['id']} + tr_msg = _( + 'Unable to validate token because domain %(id)s is ' 'disabled' + ) % {'id': self.project_domain['id']} LOG.warning(msg) raise exception.DomainNotFound(tr_msg) @@ -500,26 +507,28 @@ class TokenModel(object): raise exception.TokenNotFound(_('Trustee domain is disabled.')) try: - PROVIDERS.identity_api.assert_user_enabled( - self.trustor['id'] - ) + PROVIDERS.identity_api.assert_user_enabled(self.trustor['id']) except AssertionError: raise exception.Forbidden(_('Trustor is disabled.')) if not self.user_domain.get('enabled'): - msg = ('Unable to validate token because domain %(id)s is ' - 
'disabled') % {'id': self.user_domain['id']} - tr_msg = _('Unable to validate token because domain %(id)s is ' - 'disabled') % {'id': self.user_domain['id']} + msg = ( + 'Unable to validate token because domain %(id)s is ' 'disabled' + ) % {'id': self.user_domain['id']} + tr_msg = _( + 'Unable to validate token because domain %(id)s is ' 'disabled' + ) % {'id': self.user_domain['id']} LOG.warning(msg) raise exception.DomainNotFound(tr_msg) def _validate_system_scope(self): if self.system_scoped and not self.roles: - msg = ('User %(user_id)s has no access to the system' - ) % {'user_id': self.user_id} - tr_msg = _('User %(user_id)s has no access to the system' - ) % {'user_id': self.user_id} + msg = ('User %(user_id)s has no access to the system') % { + 'user_id': self.user_id + } + tr_msg = _('User %(user_id)s has no access to the system') % { + 'user_id': self.user_id + } LOG.debug(msg) raise exception.Unauthorized(tr_msg) @@ -552,8 +561,8 @@ class TokenModel(object): effective_trust_roles = PROVIDERS.assignment_api.add_implied_roles( refs ) - effective_trust_role_ids = ( - set([r['role_id'] for r in effective_trust_roles]) + effective_trust_role_ids = set( + [r['role_id'] for r in effective_trust_roles] ) current_effective_trustor_roles = ( PROVIDERS.assignment_api.get_roles_for_trustor_and_project( @@ -570,7 +579,8 @@ class TokenModel(object): trust_roles.append(role) else: raise exception.Forbidden( - _('Trustee has no delegated roles.')) + _('Trustee has no delegated roles.') + ) def mint(self, token_id, issued_at): """Set the ``id`` and ``issued_at`` attributes of a token. diff --git a/keystone/notifications.py b/keystone/notifications.py index a59b1d0ba2..68fe6c76e6 100644 --- a/keystone/notifications.py +++ b/keystone/notifications.py @@ -46,10 +46,15 @@ LOG = log.getLogger(__name__) # NOTE(gyee): actions that can be notified. One must update this list whenever # a new action is supported. 
_ACTIONS = collections.namedtuple( - 'NotificationActions', - 'created, deleted, disabled, updated, internal') -ACTIONS = _ACTIONS(created='created', deleted='deleted', disabled='disabled', - updated='updated', internal='internal') + 'NotificationActions', 'created, deleted, disabled, updated, internal' +) +ACTIONS = _ACTIONS( + created='created', + deleted='deleted', + disabled='disabled', + updated='updated', + internal='internal', +) """The actions on resources.""" CADF_TYPE_MAP = { @@ -90,10 +95,12 @@ DOMAIN_DELETED = 'domain_deleted' def build_audit_initiator(): """A pyCADF initiator describing the current authenticated context.""" - pycadf_host = host.Host(address=flask.request.remote_addr, - agent=str(flask.request.user_agent)) - initiator = resource.Resource(typeURI=taxonomy.ACCOUNT_USER, - host=pycadf_host) + pycadf_host = host.Host( + address=flask.request.remote_addr, agent=str(flask.request.user_agent) + ) + initiator = resource.Resource( + typeURI=taxonomy.ACCOUNT_USER, host=pycadf_host + ) oslo_context = flask.request.environ.get(context.REQUEST_CONTEXT_ENV) if oslo_context.user_id: initiator.id = utils.resource_uuid(oslo_context.user_id) @@ -121,8 +128,16 @@ class Audit(object): """ @classmethod - def _emit(cls, operation, resource_type, resource_id, initiator, public, - actor_dict=None, reason=None): + def _emit( + cls, + operation, + resource_type, + resource_id, + initiator, + public, + actor_dict=None, + reason=None, + ): """Directly send an event notification. 
:param operation: one of the values from ACTIONS @@ -149,54 +164,143 @@ class Audit(object): resource_id, initiator=initiator, actor_dict=actor_dict, - public=public) + public=public, + ) if CONF.notification_format == 'cadf' and public: outcome = taxonomy.OUTCOME_SUCCESS - _create_cadf_payload(operation, resource_type, resource_id, - outcome, initiator, reason) + _create_cadf_payload( + operation, + resource_type, + resource_id, + outcome, + initiator, + reason, + ) @classmethod - def created(cls, resource_type, resource_id, initiator=None, - public=True, reason=None): - cls._emit(ACTIONS.created, resource_type, resource_id, initiator, - public, reason=reason) + def created( + cls, + resource_type, + resource_id, + initiator=None, + public=True, + reason=None, + ): + cls._emit( + ACTIONS.created, + resource_type, + resource_id, + initiator, + public, + reason=reason, + ) @classmethod - def updated(cls, resource_type, resource_id, initiator=None, - public=True, reason=None): - cls._emit(ACTIONS.updated, resource_type, resource_id, initiator, - public, reason=reason) + def updated( + cls, + resource_type, + resource_id, + initiator=None, + public=True, + reason=None, + ): + cls._emit( + ACTIONS.updated, + resource_type, + resource_id, + initiator, + public, + reason=reason, + ) @classmethod - def disabled(cls, resource_type, resource_id, initiator=None, - public=True, reason=None): - cls._emit(ACTIONS.disabled, resource_type, resource_id, initiator, - public, reason=reason) + def disabled( + cls, + resource_type, + resource_id, + initiator=None, + public=True, + reason=None, + ): + cls._emit( + ACTIONS.disabled, + resource_type, + resource_id, + initiator, + public, + reason=reason, + ) @classmethod - def deleted(cls, resource_type, resource_id, initiator=None, - public=True, reason=None): - cls._emit(ACTIONS.deleted, resource_type, resource_id, initiator, - public, reason=reason) + def deleted( + cls, + resource_type, + resource_id, + initiator=None, + public=True, 
+ reason=None, + ): + cls._emit( + ACTIONS.deleted, + resource_type, + resource_id, + initiator, + public, + reason=reason, + ) @classmethod - def added_to(cls, target_type, target_id, actor_type, actor_id, - initiator=None, public=True, reason=None): - actor_dict = {'id': actor_id, - 'type': actor_type, - 'actor_operation': 'added'} - cls._emit(ACTIONS.updated, target_type, target_id, initiator, public, - actor_dict=actor_dict, reason=reason) + def added_to( + cls, + target_type, + target_id, + actor_type, + actor_id, + initiator=None, + public=True, + reason=None, + ): + actor_dict = { + 'id': actor_id, + 'type': actor_type, + 'actor_operation': 'added', + } + cls._emit( + ACTIONS.updated, + target_type, + target_id, + initiator, + public, + actor_dict=actor_dict, + reason=reason, + ) @classmethod - def removed_from(cls, target_type, target_id, actor_type, actor_id, - initiator=None, public=True, reason=None): - actor_dict = {'id': actor_id, - 'type': actor_type, - 'actor_operation': 'removed'} - cls._emit(ACTIONS.updated, target_type, target_id, initiator, public, - actor_dict=actor_dict, reason=reason) + def removed_from( + cls, + target_type, + target_id, + actor_type, + actor_id, + initiator=None, + public=True, + reason=None, + ): + actor_dict = { + 'id': actor_id, + 'type': actor_type, + 'actor_operation': 'removed', + } + cls._emit( + ACTIONS.updated, + target_type, + target_id, + initiator, + public, + actor_dict=actor_dict, + reason=reason, + ) @classmethod def internal(cls, resource_type, resource_id, reason=None): @@ -207,8 +311,14 @@ class Audit(object): # internal notification publicly. 
initiator = None public = False - cls._emit(ACTIONS.internal, resource_type, resource_id, initiator, - public, reason) + cls._emit( + ACTIONS.internal, + resource_type, + resource_id, + initiator, + public, + reason, + ) def invalidate_token_cache_notification(reason): @@ -232,8 +342,12 @@ def invalidate_token_cache_notification(reason): initiator = None public = False Audit._emit( - ACTIONS.internal, INVALIDATE_TOKEN_CACHE, resource_id, initiator, - public, reason=reason + ACTIONS.internal, + INVALIDATE_TOKEN_CACHE, + resource_id, + initiator, + public, + reason=reason, ) @@ -250,8 +364,9 @@ def _get_callback_info(callback): module_name = getattr(callback, '__module__', None) func_name = callback.__name__ if inspect.ismethod(callback): - class_name = reflection.get_class_name(callback.__self__, - fully_qualified=False) + class_name = reflection.get_class_name( + callback.__self__, fully_qualified=False + ) return [module_name, class_name, func_name] else: return [module_name, func_name] @@ -270,9 +385,13 @@ def register_event_callback(event, resource_type, callbacks): :raises TypeError: If callback is not callable """ if event not in ACTIONS: - raise ValueError(_('%(event)s is not a valid notification event, must ' - 'be one of: %(actions)s') % - {'event': event, 'actions': ', '.join(ACTIONS)}) + raise ValueError( + _( + '%(event)s is not a valid notification event, must ' + 'be one of: %(actions)s' + ) + % {'event': event, 'actions': ', '.join(ACTIONS)} + ) if not hasattr(callbacks, '__iter__'): callbacks = [callbacks] @@ -326,11 +445,13 @@ def listener(cls): } """ + def init_wrapper(init): @functools.wraps(init) def __new_init__(self, *args, **kwargs): init(self, *args, **kwargs) _register_event_callbacks(self) + return __new_init__ def _register_event_callbacks(self): @@ -347,14 +468,19 @@ def notify_event_callbacks(service, resource_type, operation, payload): if operation in _SUBSCRIBERS: if resource_type in _SUBSCRIBERS[operation]: for cb in 
_SUBSCRIBERS[operation][resource_type]: - subst_dict = {'cb_name': cb.__name__, - 'service': service, - 'resource_type': resource_type, - 'operation': operation, - 'payload': payload} - LOG.debug('Invoking callback %(cb_name)s for event ' - '%(service)s %(resource_type)s %(operation)s for ' - '%(payload)s', subst_dict) + subst_dict = { + 'cb_name': cb.__name__, + 'service': service, + 'resource_type': resource_type, + 'operation': operation, + 'payload': payload, + } + LOG.debug( + 'Invoking callback %(cb_name)s for event ' + '%(service)s %(resource_type)s %(operation)s for ' + '%(payload)s', + subst_dict, + ) cb(service, resource_type, operation, payload) @@ -372,8 +498,9 @@ def _get_notifier(): host = CONF.default_publisher_id or socket.gethostname() try: transport = oslo_messaging.get_notification_transport(CONF) - _notifier = oslo_messaging.Notifier(transport, - "identity.%s" % host) + _notifier = oslo_messaging.Notifier( + transport, "identity.%s" % host + ) except Exception: LOG.exception("Failed to construct notifier") _notifier = False @@ -400,8 +527,9 @@ def reset_notifier(): _notifier = None -def _create_cadf_payload(operation, resource_type, resource_id, - outcome, initiator, reason=None): +def _create_cadf_payload( + operation, resource_type, resource_id, outcome, initiator, reason=None +): """Prepare data for CADF audit notifier. 
Transform the arguments into content to be consumed by the function that @@ -436,19 +564,31 @@ def _create_cadf_payload(operation, resource_type, resource_id, if resource_id == ROOT_DOMAIN: return - target = resource.Resource(typeURI=target_uri, - id=resource_id) + target = resource.Resource(typeURI=target_uri, id=resource_id) audit_kwargs = {'resource_info': resource_id} cadf_action = '%s.%s' % (operation, resource_type) event_type = '%s.%s.%s' % (SERVICE, resource_type, operation) - _send_audit_notification(cadf_action, initiator, outcome, - target, event_type, reason=reason, **audit_kwargs) + _send_audit_notification( + cadf_action, + initiator, + outcome, + target, + event_type, + reason=reason, + **audit_kwargs + ) -def _send_notification(operation, resource_type, resource_id, initiator=None, - actor_dict=None, public=True): +def _send_notification( + operation, + resource_type, + resource_id, + initiator=None, + actor_dict=None, + public=True, +): """Send notification to inform observers about the affected resource. This method doesn't raise an exception when sending the notification fails. 
@@ -488,7 +628,8 @@ def _send_notification(operation, resource_type, resource_id, initiator=None, event_type = '%(service)s.%(resource_type)s.%(operation)s' % { 'service': SERVICE, 'resource_type': resource_type, - 'operation': operation} + 'operation': operation, + } if _check_notification_opt_out(event_type, outcome=None): return try: @@ -496,7 +637,8 @@ def _send_notification(operation, resource_type, resource_id, initiator=None, except Exception: LOG.exception( 'Failed to send %(res_id)s %(event_type)s notification', - {'res_id': resource_id, 'event_type': event_type}) + {'res_id': resource_id, 'event_type': event_type}, + ) def _get_request_audit_info(context, user_id=None): @@ -517,12 +659,15 @@ def _get_request_audit_info(context, user_id=None): remote_addr = environment.get('REMOTE_ADDR') http_user_agent = environment.get('HTTP_USER_AGENT') if not user_id: - user_id = environment.get('KEYSTONE_AUTH_CONTEXT', - {}).get('user_id') - project_id = environment.get('KEYSTONE_AUTH_CONTEXT', - {}).get('project_id') - domain_id = environment.get('KEYSTONE_AUTH_CONTEXT', - {}).get('domain_id') + user_id = environment.get('KEYSTONE_AUTH_CONTEXT', {}).get( + 'user_id' + ) + project_id = environment.get('KEYSTONE_AUTH_CONTEXT', {}).get( + 'project_id' + ) + domain_id = environment.get('KEYSTONE_AUTH_CONTEXT', {}).get( + 'domain_id' + ) host = pycadf.host.Host(address=remote_addr, agent=http_user_agent) initiator = resource.Resource(typeURI=taxonomy.ACCOUNT_USER, host=host) @@ -571,28 +716,39 @@ class CadfNotificationWrapper(object): initiator.id = utils.resource_uuid(user_id) try: result = f(wrapped_self, user_id, *args, **kwargs) - except (exception.AccountLocked, - exception.PasswordExpired) as ex: + except (exception.AccountLocked, exception.PasswordExpired) as ex: # Send a CADF event with a reason for PCI-DSS related # authentication failures audit_reason = reason.Reason(str(ex), str(ex.code)) - _send_audit_notification(self.action, initiator, - 
taxonomy.OUTCOME_FAILURE, - target, self.event_type, - reason=audit_reason) + _send_audit_notification( + self.action, + initiator, + taxonomy.OUTCOME_FAILURE, + target, + self.event_type, + reason=audit_reason, + ) if isinstance(ex, exception.AccountLocked): raise exception.Unauthorized raise except Exception: # For authentication failure send a CADF event as well - _send_audit_notification(self.action, initiator, - taxonomy.OUTCOME_FAILURE, - target, self.event_type) + _send_audit_notification( + self.action, + initiator, + taxonomy.OUTCOME_FAILURE, + target, + self.event_type, + ) raise else: - _send_audit_notification(self.action, initiator, - taxonomy.OUTCOME_SUCCESS, - target, self.event_type) + _send_audit_notification( + self.action, + initiator, + taxonomy.OUTCOME_SUCCESS, + target, + self.event_type, + ) return result return wrapper @@ -618,8 +774,11 @@ class CadfRoleAssignmentNotificationWrapper(object): def __init__(self, operation): self.action = '%s.%s' % (operation, self.ROLE_ASSIGNMENT) - self.event_type = '%s.%s.%s' % (SERVICE, self.ROLE_ASSIGNMENT, - operation) + self.event_type = '%s.%s.%s' % ( + SERVICE, + self.ROLE_ASSIGNMENT, + operation, + ) def __call__(self, f): @functools.wraps(f) @@ -661,7 +820,8 @@ class CadfRoleAssignmentNotificationWrapper(object): based on the method signature. 
""" call_args = inspect.getcallargs( - f, wrapped_self, role_id, *args, **kwargs) + f, wrapped_self, role_id, *args, **kwargs + ) inherited = call_args['inherited_to_projects'] initiator = call_args.get('initiator', None) target = resource.Resource(typeURI=taxonomy.ACCOUNT_USER) @@ -683,24 +843,32 @@ class CadfRoleAssignmentNotificationWrapper(object): try: result = f(wrapped_self, role_id, *args, **kwargs) except Exception: - _send_audit_notification(self.action, initiator, - taxonomy.OUTCOME_FAILURE, - target, self.event_type, - **audit_kwargs) + _send_audit_notification( + self.action, + initiator, + taxonomy.OUTCOME_FAILURE, + target, + self.event_type, + **audit_kwargs + ) raise else: - _send_audit_notification(self.action, initiator, - taxonomy.OUTCOME_SUCCESS, - target, self.event_type, - **audit_kwargs) + _send_audit_notification( + self.action, + initiator, + taxonomy.OUTCOME_SUCCESS, + target, + self.event_type, + **audit_kwargs + ) return result return wrapper -def send_saml_audit_notification(action, user_id, group_ids, - identity_provider, protocol, token_id, - outcome): +def send_saml_audit_notification( + action, user_id, group_ids, identity_provider, protocol, token_id, outcome +): """Send notification to inform observers about SAML events. 
:param action: Action being audited @@ -724,9 +892,13 @@ def send_saml_audit_notification(action, user_id, group_ids, user_id = user_id or taxonomy.UNKNOWN token_id = token_id or taxonomy.UNKNOWN group_ids = group_ids or [] - cred = credential.FederatedCredential(token=token_id, type=audit_type, - identity_provider=identity_provider, - user=user_id, groups=group_ids) + cred = credential.FederatedCredential( + token=token_id, + type=audit_type, + identity_provider=identity_provider, + user=user_id, + groups=group_ids, + ) initiator.credential = cred event_type = '%s.%s' % (SERVICE, action) _send_audit_notification(action, initiator, outcome, target, event_type) @@ -736,8 +908,9 @@ class _CatalogHelperObj(provider_api.ProviderAPIMixin, object): """A helper object to allow lookups of identity service id.""" -def _send_audit_notification(action, initiator, outcome, target, - event_type, reason=None, **kwargs): +def _send_audit_notification( + action, initiator, outcome, target, event_type, reason=None, **kwargs +): """Send CADF notification to inform observers about the affected resource. This method logs an exception when sending the notification fails. 
@@ -777,7 +950,8 @@ def _send_audit_notification(action, initiator, outcome, target, initiator=initiator, target=target, reason=reason, - observer=resource.Resource(typeURI=taxonomy.SERVICE_SECURITY)) + observer=resource.Resource(typeURI=taxonomy.SERVICE_SECURITY), + ) if service_id is not None: event.observer.id = service_id @@ -797,7 +971,8 @@ def _send_audit_notification(action, initiator, outcome, target, # notification should not interfere with the API request LOG.exception( 'Failed to send %(action)s %(event_type)s notification', - {'action': action, 'event_type': event_type}) + {'action': action, 'event_type': event_type}, + ) def _check_notification_opt_out(event_type, outcome): diff --git a/keystone/oauth1/backends/base.py b/keystone/oauth1/backends/base.py index f8142dd112..e8f8ad89d8 100644 --- a/keystone/oauth1/backends/base.py +++ b/keystone/oauth1/backends/base.py @@ -151,8 +151,9 @@ class Oauth1DriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def create_request_token(self, consumer_id, requested_project, - request_token_duration): + def create_request_token( + self, consumer_id, requested_project, request_token_duration + ): """Create request token. 
:param consumer_id: the id of the consumer diff --git a/keystone/oauth1/backends/sql.py b/keystone/oauth1/backends/sql.py index 944b58b8ae..04cd56bf11 100644 --- a/keystone/oauth1/backends/sql.py +++ b/keystone/oauth1/backends/sql.py @@ -40,17 +40,28 @@ class Consumer(sql.ModelBase, sql.ModelDictMixinWithExtras): class RequestToken(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'request_token' - attributes = ['id', 'request_secret', - 'verifier', 'authorizing_user_id', 'requested_project_id', - 'role_ids', 'consumer_id', 'expires_at'] + attributes = [ + 'id', + 'request_secret', + 'verifier', + 'authorizing_user_id', + 'requested_project_id', + 'role_ids', + 'consumer_id', + 'expires_at', + ] id = sql.Column(sql.String(64), primary_key=True, nullable=False) request_secret = sql.Column(sql.String(64), nullable=False) verifier = sql.Column(sql.String(64), nullable=True) authorizing_user_id = sql.Column(sql.String(64), nullable=True) requested_project_id = sql.Column(sql.String(64), nullable=False) role_ids = sql.Column(sql.Text(), nullable=True) - consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'), - nullable=False, index=True) + consumer_id = sql.Column( + sql.String(64), + sql.ForeignKey('consumer.id'), + nullable=False, + index=True, + ) expires_at = sql.Column(sql.String(64), nullable=True) @classmethod @@ -63,17 +74,25 @@ class RequestToken(sql.ModelBase, sql.ModelDictMixin): class AccessToken(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'access_token' - attributes = ['id', 'access_secret', 'authorizing_user_id', - 'project_id', 'role_ids', 'consumer_id', - 'expires_at'] + attributes = [ + 'id', + 'access_secret', + 'authorizing_user_id', + 'project_id', + 'role_ids', + 'consumer_id', + 'expires_at', + ] id = sql.Column(sql.String(64), primary_key=True, nullable=False) access_secret = sql.Column(sql.String(64), nullable=False) - authorizing_user_id = sql.Column(sql.String(64), nullable=False, - index=True) + authorizing_user_id = 
sql.Column( + sql.String(64), nullable=False, index=True + ) project_id = sql.Column(sql.String(64), nullable=False) role_ids = sql.Column(sql.Text(), nullable=False) - consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'), - nullable=False) + consumer_id = sql.Column( + sql.String(64), sql.ForeignKey('consumer.id'), nullable=False + ) expires_at = sql.Column(sql.String(64), nullable=True) @classmethod @@ -97,8 +116,7 @@ class OAuth1(base.Oauth1DriverBase): return consumer_ref.to_dict() def get_consumer(self, consumer_id): - return base.filter_consumer( - self.get_consumer_with_secret(consumer_id)) + return base.filter_consumer(self.get_consumer_with_secret(consumer_id)) def create_consumer(self, consumer_ref): with sql.session_for_write() as session: @@ -147,8 +165,9 @@ class OAuth1(base.Oauth1DriverBase): consumer.extra = new_consumer.extra return base.filter_consumer(consumer.to_dict()) - def create_request_token(self, consumer_id, requested_project, - request_token_duration): + def create_request_token( + self, consumer_id, requested_project, request_token_duration + ): request_token_id = uuid.uuid4().hex request_token_secret = uuid.uuid4().hex expiry_date = None @@ -182,14 +201,14 @@ class OAuth1(base.Oauth1DriverBase): token_ref = self._get_request_token(session, request_token_id) return token_ref.to_dict() - def authorize_request_token(self, request_token_id, user_id, - role_ids): + def authorize_request_token(self, request_token_id, user_id, role_ids): with sql.session_for_write() as session: token_ref = self._get_request_token(session, request_token_id) token_dict = token_ref.to_dict() token_dict['authorizing_user_id'] = user_id - token_dict['verifier'] = ''.join(random.sample(base.VERIFIER_CHARS, - 8)) + token_dict['verifier'] = ''.join( + random.sample(base.VERIFIER_CHARS, 8) + ) token_dict['role_ids'] = jsonutils.dumps(role_ids) new_token = RequestToken.from_dict(token_dict) @@ -209,8 +228,9 @@ class OAuth1(base.Oauth1DriverBase): 
expiry_date = None if access_token_duration > 0: now = timeutils.utcnow() - future = (now + - datetime.timedelta(seconds=access_token_duration)) + future = now + datetime.timedelta( + seconds=access_token_duration + ) expiry_date = utils.isotime(future, subsecond=True) # add Access Token diff --git a/keystone/oauth1/core.py b/keystone/oauth1/core.py index dd8a4482a0..ce7cb67218 100644 --- a/keystone/oauth1/core.py +++ b/keystone/oauth1/core.py @@ -89,18 +89,27 @@ def validate_oauth_params(query_string): if 'error' in params_fitered: msg = ( 'Validation failed with errors: %(error)s, detail ' - 'message is: %(desc)s.') % { - 'error': params_fitered['error'], - 'desc': params_fitered['error_description']} - tr_msg = _('Validation failed with errors: %(error)s, detail ' - 'message is: %(desc)s.') % { + 'message is: %(desc)s.' + ) % { 'error': params_fitered['error'], - 'desc': params_fitered['error_description']} + 'desc': params_fitered['error_description'], + } + tr_msg = _( + 'Validation failed with errors: %(error)s, detail ' + 'message is: %(desc)s.' + ) % { + 'error': params_fitered['error'], + 'desc': params_fitered['error_description'], + } else: - msg = ('Unknown parameters found,' - 'please provide only oauth parameters.') - tr_msg = _('Unknown parameters found,' - 'please provide only oauth parameters.') + msg = ( + 'Unknown parameters found,' + 'please provide only oauth parameters.' + ) + tr_msg = _( + 'Unknown parameters found,' + 'please provide only oauth parameters.' 
+ ) LOG.warning(msg) raise exception.ValidationError(message=tr_msg) @@ -140,23 +149,31 @@ class Manager(manager.Manager): notifications.Audit.deleted(self._CONSUMER, consumer_id, initiator) return ret - def create_access_token(self, request_id, access_token_duration, - initiator=None): - ret = self.driver.create_access_token(request_id, - access_token_duration) + def create_access_token( + self, request_id, access_token_duration, initiator=None + ): + ret = self.driver.create_access_token( + request_id, access_token_duration + ) notifications.Audit.created(self._ACCESS_TOKEN, ret['id'], initiator) return ret def delete_access_token(self, user_id, access_token_id, initiator=None): ret = self.driver.delete_access_token(user_id, access_token_id) - notifications.Audit.deleted(self._ACCESS_TOKEN, access_token_id, - initiator) + notifications.Audit.deleted( + self._ACCESS_TOKEN, access_token_id, initiator + ) return ret - def create_request_token(self, consumer_id, requested_project, - request_token_duration, initiator=None): + def create_request_token( + self, + consumer_id, + requested_project, + request_token_duration, + initiator=None, + ): ret = self.driver.create_request_token( - consumer_id, requested_project, request_token_duration) - notifications.Audit.created(self._REQUEST_TOKEN, ret['id'], - initiator) + consumer_id, requested_project, request_token_duration + ) + notifications.Audit.created(self._REQUEST_TOKEN, ret['id'], initiator) return ret diff --git a/keystone/oauth1/schema.py b/keystone/oauth1/schema.py index 1b80809fd1..10ffcf308b 100644 --- a/keystone/oauth1/schema.py +++ b/keystone/oauth1/schema.py @@ -20,17 +20,15 @@ _consumer_properties = { consumer_create = { 'type': 'object', 'properties': _consumer_properties, - 'additionalProperties': True + 'additionalProperties': True, } consumer_update = { 'type': 'object', 'properties': _consumer_properties, - 'not': { - 'required': ['secret'] - }, + 'not': {'required': ['secret']}, 'minProperties': 1, - 
'additionalProperties': True + 'additionalProperties': True, } request_token_authorize = { @@ -43,6 +41,6 @@ request_token_authorize = { }, 'minProperties': 1, 'maxProperties': 1, - 'additionalProperties': False - } + 'additionalProperties': False, + }, } diff --git a/keystone/oauth1/validator.py b/keystone/oauth1/validator.py index 0b601feed4..40bd6421b2 100644 --- a/keystone/oauth1/validator.py +++ b/keystone/oauth1/validator.py @@ -38,8 +38,7 @@ class OAuthValidator(provider_api.ProviderAPIMixin, oauth1.RequestValidator): def _check_token(self, token): # generic token verification when they're obtained from a uuid hex - return (set(token) <= self.safe_characters and - len(token) == 32) + return set(token) <= self.safe_characters and len(token) == 32 def check_client_key(self, client_key): return self._check_token(client_key) @@ -55,8 +54,10 @@ class OAuthValidator(provider_api.ProviderAPIMixin, oauth1.RequestValidator): return set(nonce) <= self.safe_characters def check_verifier(self, verifier): - return (all(i in base.VERIFIER_CHARS for i in verifier) and - len(verifier) == 8) + return ( + all(i in base.VERIFIER_CHARS for i in verifier) + and len(verifier) == 8 + ) def get_client_secret(self, client_key, request): client = PROVIDERS.oauth_api.get_consumer_with_secret(client_key) @@ -142,13 +143,15 @@ class OAuthValidator(provider_api.ProviderAPIMixin, oauth1.RequestValidator): except exception.NotFound: return False - def validate_timestamp_and_nonce(self, - client_key, - timestamp, - nonce, - request, - request_token=None, - access_token=None): + def validate_timestamp_and_nonce( + self, + client_key, + timestamp, + nonce, + request, + request_token=None, + access_token=None, + ): return True def validate_redirect_uri(self, client_key, redirect_uri, request): @@ -159,12 +162,9 @@ class OAuthValidator(provider_api.ProviderAPIMixin, oauth1.RequestValidator): # realms are not used return True - def validate_realms(self, - client_key, - token, - request, - 
uri=None, - realms=None): + def validate_realms( + self, client_key, token, request, uri=None, realms=None + ): return True def validate_verifier(self, client_key, token, verifier, request): @@ -186,22 +186,24 @@ class OAuthValidator(provider_api.ProviderAPIMixin, oauth1.RequestValidator): # implemented. The real implementation logic is in the backend. def save_access_token(self, token, request): pass -# token_duration = CONF.oauth1.request_token_duration -# request_token_id = request.client_key -# self.oauth_api.create_access_token(request_token_id, -# token_duration, -# token["oauth_token"], -# token["oauth_token_secret"]) + + # token_duration = CONF.oauth1.request_token_duration + # request_token_id = request.client_key + # self.oauth_api.create_access_token(request_token_id, + # token_duration, + # token["oauth_token"], + # token["oauth_token_secret"]) def save_request_token(self, token, request): pass -# project_id = request.headers.get('Requested-Project-Id') -# token_duration = CONF.oauth1.request_token_duration -# self.oauth_api.create_request_token(request.client_key, -# project_id, -# token_duration, -# token["oauth_token"], -# token["oauth_token_secret"]) + + # project_id = request.headers.get('Requested-Project-Id') + # token_duration = CONF.oauth1.request_token_duration + # self.oauth_api.create_request_token(request.client_key, + # project_id, + # token_duration, + # token["oauth_token"], + # token["oauth_token_secret"]) def save_verifier(self, token, verifier, request): """Associate an authorization verifier with a request token. 
diff --git a/keystone/oauth2/handlers.py b/keystone/oauth2/handlers.py index e2c16c5cf1..25b679928d 100644 --- a/keystone/oauth2/handlers.py +++ b/keystone/oauth2/handlers.py @@ -17,14 +17,18 @@ from keystone.server import flask as ks_flask def build_response(error): - response = flask.make_response(( - { - 'error': error.error_title, - 'error_description': error.message_format - }, - f"{error.code} {error.title}")) + response = flask.make_response( + ( + { + 'error': error.error_title, + 'error_description': error.message_format, + }, + f"{error.code} {error.title}", + ) + ) if error.code == 401: - response.headers['WWW-Authenticate'] = \ + response.headers['WWW-Authenticate'] = ( 'Keystone uri="%s"' % ks_flask.base_url() + ) return response diff --git a/keystone/policy/backends/rules.py b/keystone/policy/backends/rules.py index 596032da8f..f9e1580e29 100644 --- a/keystone/policy/backends/rules.py +++ b/keystone/policy/backends/rules.py @@ -28,9 +28,7 @@ LOG = log.getLogger(__name__) class Policy(base.PolicyDriverBase): def enforce(self, credentials, action, target): msg = 'enforce %(action)s: %(credentials)s' - LOG.debug(msg, { - 'action': action, - 'credentials': credentials}) + LOG.debug(msg, {'action': action, 'credentials': credentials}) policy.enforce(credentials, action, target) def create_policy(self, policy_id, policy): diff --git a/keystone/policy/schema.py b/keystone/policy/schema.py index 512c4ce7a1..e70092d26c 100644 --- a/keystone/policy/schema.py +++ b/keystone/policy/schema.py @@ -12,25 +12,20 @@ _policy_properties = { - 'blob': { - 'type': 'string' - }, - 'type': { - 'type': 'string', - 'maxLength': 255 - } + 'blob': {'type': 'string'}, + 'type': {'type': 'string', 'maxLength': 255}, } policy_create = { 'type': 'object', 'properties': _policy_properties, 'required': ['blob', 'type'], - 'additionalProperties': True + 'additionalProperties': True, } policy_update = { 'type': 'object', 'properties': _policy_properties, 'minProperties': 1, - 
'additionalProperties': True + 'additionalProperties': True, } diff --git a/keystone/receipt/__init__.py b/keystone/receipt/__init__.py index 37ebed18af..fe8c922073 100644 --- a/keystone/receipt/__init__.py +++ b/keystone/receipt/__init__.py @@ -15,6 +15,4 @@ from keystone.receipt import provider -__all__ = ( - "provider", -) +__all__ = ("provider",) diff --git a/keystone/receipt/handlers.py b/keystone/receipt/handlers.py index 9a8d3e4b02..736fb3dbc9 100644 --- a/keystone/receipt/handlers.py +++ b/keystone/receipt/handlers.py @@ -26,17 +26,17 @@ PROVIDERS = provider_api.ProviderAPIs def extract_receipt(auth_context): receipt_id = flask.request.headers.get( - authorization.AUTH_RECEIPT_HEADER, None) + authorization.AUTH_RECEIPT_HEADER, None + ) if receipt_id: - receipt = PROVIDERS.receipt_provider_api.validate_receipt( - receipt_id) + receipt = PROVIDERS.receipt_provider_api.validate_receipt(receipt_id) if auth_context['user_id'] != receipt.user_id: raise exception.ReceiptNotFound( "AuthContext user_id: %s does not match " - "user_id for supplied auth receipt: %s" % - (auth_context['user_id'], receipt.user_id), - receipt_id=receipt_id + "user_id for supplied auth receipt: %s" + % (auth_context['user_id'], receipt.user_id), + receipt_id=receipt_id, ) else: receipt = None @@ -53,7 +53,7 @@ def _render_receipt_response_from_model(receipt): 'domain': { 'id': receipt.user_domain['id'], 'name': receipt.user_domain['name'], - } + }, }, 'expires_at': receipt.expires_at, 'issued_at': receipt.issued_at, @@ -64,8 +64,9 @@ def _render_receipt_response_from_model(receipt): def build_receipt(mfa_error): - receipt = PROVIDERS.receipt_provider_api. 
\ - issue_receipt(mfa_error.user_id, mfa_error.methods) + receipt = PROVIDERS.receipt_provider_api.issue_receipt( + mfa_error.user_id, mfa_error.methods + ) resp_data = _render_receipt_response_from_model(receipt) resp_body = jsonutils.dumps(resp_data) response = flask.make_response(resp_body, http.client.UNAUTHORIZED) diff --git a/keystone/receipt/provider.py b/keystone/receipt/provider.py index 6ea64d1efc..8bdd9208fd 100644 --- a/keystone/receipt/provider.py +++ b/keystone/receipt/provider.py @@ -36,8 +36,8 @@ PROVIDERS = provider_api.ProviderAPIs RECEIPTS_REGION = cache.create_region(name='receipts') MEMOIZE_RECEIPTS = cache.get_memoization_decorator( - group='receipt', - region=RECEIPTS_REGION) + group='receipt', region=RECEIPTS_REGION +) def default_expire_time(): @@ -82,15 +82,18 @@ class Manager(manager.Manager): ['project', self._drop_receipt_cache], ], notifications.ACTIONS.internal: [ - [notifications.INVALIDATE_TOKEN_CACHE, - self._drop_receipt_cache], - ] + [ + notifications.INVALIDATE_TOKEN_CACHE, + self._drop_receipt_cache, + ], + ], } for event, cb_info in callbacks.items(): for resource_type, callback_fns in cb_info: - notifications.register_event_callback(event, resource_type, - callback_fns) + notifications.register_event_callback( + event, resource_type, callback_fns + ) def _drop_receipt_cache(self, service, resource_type, operation, payload): """Invalidate the entire receipt cache. 
@@ -105,7 +108,8 @@ class Manager(manager.Manager): def validate_receipt(self, receipt_id, window_seconds=0): if not receipt_id: raise exception.ReceiptNotFound( - _('No receipt in the request'), receipt_id=receipt_id) + _('No receipt in the request'), receipt_id=receipt_id + ) try: receipt = self._validate_receipt(receipt_id) @@ -117,8 +121,9 @@ class Manager(manager.Manager): @MEMOIZE_RECEIPTS def _validate_receipt(self, receipt_id): - (user_id, methods, issued_at, - expires_at) = self.driver.validate_receipt(receipt_id) + (user_id, methods, issued_at, expires_at) = ( + self.driver.validate_receipt(receipt_id) + ) receipt = receipt_model.ReceiptModel() receipt.user_id = user_id @@ -139,16 +144,21 @@ class Manager(manager.Manager): expiry += datetime.timedelta(seconds=window_seconds) except Exception: - LOG.exception('Unexpected error or malformed receipt ' - 'determining receipt expiry: %s', receipt) + LOG.exception( + 'Unexpected error or malformed receipt ' + 'determining receipt expiry: %s', + receipt, + ) raise exception.ReceiptNotFound( - _('Failed to validate receipt'), receipt_id=receipt.id) + _('Failed to validate receipt'), receipt_id=receipt.id + ) if current_time < expiry: return None else: raise exception.ReceiptNotFound( - _('Failed to validate receipt'), receipt_id=receipt.id) + _('Failed to validate receipt'), receipt_id=receipt.id + ) def issue_receipt(self, user_id, method_names, expires_at=None): @@ -169,7 +179,6 @@ class Manager(manager.Manager): receipt.mint(receipt_id, issued_at) if CONF.receipt.cache_on_issue: - self._validate_receipt.set( - receipt, RECEIPTS_REGION, receipt_id) + self._validate_receipt.set(receipt, RECEIPTS_REGION, receipt_id) return receipt diff --git a/keystone/receipt/providers/fernet/__init__.py b/keystone/receipt/providers/fernet/__init__.py index b5574acab0..970c942670 100644 --- a/keystone/receipt/providers/fernet/__init__.py +++ b/keystone/receipt/providers/fernet/__init__.py @@ -15,6 +15,4 @@ from 
keystone.receipt.providers.fernet.core import Provider -__all__ = ( - "Provider", -) +__all__ = ("Provider",) diff --git a/keystone/receipt/providers/fernet/core.py b/keystone/receipt/providers/fernet/core.py index fb75f875e5..8a51caca5c 100644 --- a/keystone/receipt/providers/fernet/core.py +++ b/keystone/receipt/providers/fernet/core.py @@ -40,9 +40,14 @@ class Provider(base.Provider): raise SystemExit(_('%(key_repo)s does not exist') % subs) if not os.listdir(CONF.fernet_receipts.key_repository): subs = {'key_repo': CONF.fernet_receipts.key_repository} - raise SystemExit(_('%(key_repo)s does not contain keys, use ' - 'keystone-manage fernet_setup to create ' - 'Fernet keys.') % subs) + raise SystemExit( + _( + '%(key_repo)s does not contain keys, use ' + 'keystone-manage fernet_setup to create ' + 'Fernet keys.' + ) + % subs + ) self.receipt_formatter = tf.ReceiptFormatter() @@ -59,8 +64,7 @@ class Provider(base.Provider): receipt.expires_at, ) creation_datetime_obj = self.receipt_formatter.creation_time( - receipt_id) - issued_at = ks_utils.isotime( - at=creation_datetime_obj, subsecond=True + receipt_id ) + issued_at = ks_utils.isotime(at=creation_datetime_obj, subsecond=True) return receipt_id, issued_at diff --git a/keystone/receipt/receipt_formatters.py b/keystone/receipt/receipt_formatters.py index b2d3a9ae05..e63e70208c 100644 --- a/keystone/receipt/receipt_formatters.py +++ b/keystone/receipt/receipt_formatters.py @@ -58,7 +58,7 @@ class ReceiptFormatter(object): fernet_utils = utils.FernetUtils( CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 'fernet_receipts', ) keys = fernet_utils.load_keys() @@ -91,7 +91,8 @@ class ReceiptFormatter(object): return self.crypto.decrypt(receipt.encode('utf-8')) except fernet.InvalidToken: raise exception.ValidationError( - _('This is not a recognized Fernet receipt %s') % receipt) + _('This is not a recognized Fernet receipt %s') % receipt + ) @classmethod def 
restore_padding(cls, receipt): @@ -122,7 +123,8 @@ class ReceiptFormatter(object): # Fernet receipts are base64 encoded, so we need to unpack them first # urlsafe_b64decode() requires bytes receipt_bytes = base64.urlsafe_b64decode( - fernet_receipt.encode('utf-8')) + fernet_receipt.encode('utf-8') + ) # slice into the byte array to get just the timestamp timestamp_bytes = receipt_bytes[TIMESTAMP_START:TIMESTAMP_END] @@ -150,9 +152,11 @@ class ReceiptFormatter(object): # anywhere, we can't say it isn't being stored somewhere else with # those kind of backend constraints. if len(receipt) > 255: - LOG.info('Fernet receipt created with length of %d ' - 'characters, which exceeds 255 characters', - len(receipt)) + LOG.info( + 'Fernet receipt created with length of %d ' + 'characters, which exceeds 255 characters', + len(receipt), + ) return receipt @@ -246,8 +250,10 @@ class ReceiptPayload(object): """ time_object = timeutils.parse_isotime(time_string) - return (timeutils.normalize_time(time_object) - - datetime.datetime.utcfromtimestamp(0)).total_seconds() + return ( + timeutils.normalize_time(time_object) + - datetime.datetime.utcfromtimestamp(0) + ).total_seconds() @classmethod def _convert_float_to_time_string(cls, time_float): diff --git a/keystone/resource/backends/sql.py b/keystone/resource/backends/sql.py index d51ccc3329..54e262f7f2 100644 --- a/keystone/resource/backends/sql.py +++ b/keystone/resource/backends/sql.py @@ -53,8 +53,7 @@ class Resource(base.ResourceDriverBase): query = session.query(sql_model.Project) query = query.filter_by(name=project_name) if domain_id is None: - query = query.filter_by( - domain_id=base.NULL_DOMAIN_ID) + query = query.filter_by(domain_id=base.NULL_DOMAIN_ID) else: query = query.filter_by(domain_id=domain_id) try: @@ -75,13 +74,14 @@ class Resource(base.ResourceDriverBase): # filter_limit_query() below, which will remove the filter from the # hints (hence ensuring our substitution is not exposed to the caller). 
for f in hints.filters: - if (f['name'] == 'domain_id' and f['value'] is None): + if f['name'] == 'domain_id' and f['value'] is None: f['value'] = base.NULL_DOMAIN_ID with sql.session_for_read() as session: query = session.query(sql_model.Project) query = query.filter(sql_model.Project.id != base.NULL_DOMAIN_ID) - project_refs = sql.filter_limit_query(sql_model.Project, query, - hints) + project_refs = sql.filter_limit_query( + sql_model.Project, query, hints + ) return [project_ref.to_dict() for project_ref in project_refs] def list_projects_from_ids(self, ids): @@ -91,8 +91,11 @@ class Resource(base.ResourceDriverBase): with sql.session_for_read() as session: query = session.query(sql_model.Project) query = query.filter(sql_model.Project.id.in_(ids)) - return [project_ref.to_dict() for project_ref in query.all() - if not self._is_hidden_ref(project_ref)] + return [ + project_ref.to_dict() + for project_ref in query.all() + if not self._is_hidden_ref(project_ref) + ] def list_project_ids_from_domain_ids(self, domain_ids): if not domain_ids: @@ -100,10 +103,12 @@ class Resource(base.ResourceDriverBase): else: with sql.session_for_read() as session: query = session.query(sql_model.Project.id) - query = ( - query.filter(sql_model.Project.domain_id.in_(domain_ids))) - return [x.id for x in query.all() - if not self._is_hidden_ref(x)] + query = query.filter( + sql_model.Project.domain_id.in_(domain_ids) + ) + return [ + x.id for x in query.all() if not self._is_hidden_ref(x) + ] def list_projects_in_domain(self, domain_id): with sql.session_for_read() as session: @@ -113,7 +118,8 @@ class Resource(base.ResourceDriverBase): raise exception.DomainNotFound(domain_id=domain_id) query = session.query(sql_model.Project) project_refs = query.filter( - sql_model.Project.domain_id == domain_id) + sql_model.Project.domain_id == domain_id + ) return [project_ref.to_dict() for project_ref in project_refs] def list_projects_acting_as_domain(self, hints): @@ -135,9 +141,11 @@ class 
Resource(base.ResourceDriverBase): children_ids = set() for ref in children: if ref['id'] in examined: - msg = ('Circular reference or a repeated ' - 'entry found in projects hierarchy - ' - '%(project_id)s.') + msg = ( + 'Circular reference or a repeated ' + 'entry found in projects hierarchy - ' + '%(project_id)s.' + ) LOG.error(msg, {'project_id': ref['id']}) return children_ids.add(ref['id']) @@ -154,15 +162,18 @@ class Resource(base.ResourceDriverBase): examined = set() while project.get('parent_id') is not None: if project['id'] in examined: - msg = ('Circular reference or a repeated ' - 'entry found in projects hierarchy - ' - '%(project_id)s.') + msg = ( + 'Circular reference or a repeated ' + 'entry found in projects hierarchy - ' + '%(project_id)s.' + ) LOG.error(msg, {'project_id': project['id']}) return examined.add(project['id']) parent_project = self._get_project( - session, project['parent_id']).to_dict() + session, project['parent_id'] + ).to_dict() parents.append(parent_project) project = parent_project return parents @@ -178,7 +189,8 @@ class Resource(base.ResourceDriverBase): query = session.query(sql_model.ProjectTag) if 'tags' in filters.keys(): filtered_ids += self._filter_ids_by_tags( - query, filters['tags'].split(',')) + query, filters['tags'].split(',') + ) if 'tags-any' in filters.keys(): any_tags = filters['tags-any'].split(',') subq = query.filter(sql_model.ProjectTag.name.in_(any_tags)) @@ -188,10 +200,11 @@ class Resource(base.ResourceDriverBase): filtered_ids = any_tags if 'not-tags' in filters.keys(): blacklist_ids = self._filter_ids_by_tags( - query, filters['not-tags'].split(',')) - filtered_ids = self._filter_not_tags(session, - filtered_ids, - blacklist_ids) + query, filters['not-tags'].split(',') + ) + filtered_ids = self._filter_not_tags( + session, filtered_ids, blacklist_ids + ) if 'not-tags-any' in filters.keys(): any_tags = filters['not-tags-any'].split(',') subq = query.filter(sql_model.ProjectTag.name.in_(any_tags)) @@ 
-199,22 +212,26 @@ class Resource(base.ResourceDriverBase): if 'not-tags' in filters.keys(): filtered_ids += blacklist_ids else: - filtered_ids = self._filter_not_tags(session, - filtered_ids, - blacklist_ids) + filtered_ids = self._filter_not_tags( + session, filtered_ids, blacklist_ids + ) if not filtered_ids: return [] query = session.query(sql_model.Project) query = query.filter(sql_model.Project.id.in_(filtered_ids)) - return [project_ref.to_dict() for project_ref in query.all() - if not self._is_hidden_ref(project_ref)] + return [ + project_ref.to_dict() + for project_ref in query.all() + if not self._is_hidden_ref(project_ref) + ] def _filter_ids_by_tags(self, query, tags): filtered_ids = [] subq = query.filter(sql_model.ProjectTag.name.in_(tags)) for ptag in subq: - subq_tags = query.filter(sql_model.ProjectTag.project_id == - ptag['project_id']) + subq_tags = query.filter( + sql_model.ProjectTag.project_id == ptag['project_id'] + ) result = map(lambda x: x['name'], subq_tags.all()) if set(tags) <= set(result): filtered_ids.append(ptag['project_id']) @@ -258,12 +275,16 @@ class Resource(base.ResourceDriverBase): # Move the "_resource_options" attribute over to the real ref # so that resource_options.resource_options_ref_to_mapper can # handle the work. 
- setattr(project_ref, '_resource_options', - getattr(new_project, '_resource_options', {})) + setattr( + project_ref, + '_resource_options', + getattr(new_project, '_resource_options', {}), + ) # Move options into the proper attribute mapper construct resource_options.resource_options_ref_to_mapper( - project_ref, sql_model.ProjectOption) + project_ref, sql_model.ProjectOption + ) project_ref.extra = new_project.extra return project_ref.to_dict(include_extra_dict=True) @@ -279,13 +300,18 @@ class Resource(base.ResourceDriverBase): return with sql.session_for_write() as session: query = session.query(sql_model.Project).filter( - sql_model.Project.id.in_(project_ids)) + sql_model.Project.id.in_(project_ids) + ) project_ids_from_bd = [p['id'] for p in query.all()] for project_id in project_ids: - if (project_id not in project_ids_from_bd or - project_id == base.NULL_DOMAIN_ID): - LOG.warning('Project %s does not exist and was not ' - 'deleted.', project_id) + if ( + project_id not in project_ids_from_bd + or project_id == base.NULL_DOMAIN_ID + ): + LOG.warning( + 'Project %s does not exist and was not ' 'deleted.', + project_id, + ) query.delete(synchronize_session=False) def check_project_depth(self, max_depth): @@ -342,9 +368,9 @@ class Resource(base.ResourceDriverBase): for index in range(max_depth): query = query.outerjoin( obj_list[index + 1], - obj_list[index].id == obj_list[index + 1].parent_id) - exceeded_lines = query.filter( - obj_list[-1].id != expression.null()) + obj_list[index].id == obj_list[index + 1].parent_id, + ) + exceeded_lines = query.filter(obj_list[-1].id != expression.null()) if exceeded_lines: return [line[max_depth].id for line in exceeded_lines] diff --git a/keystone/resource/backends/sql_model.py b/keystone/resource/backends/sql_model.py index ed90f63b6d..caef6928cd 100644 --- a/keystone/resource/backends/sql_model.py +++ b/keystone/resource/backends/sql_model.py @@ -28,8 +28,7 @@ class Project(sql.ModelBase, 
sql.ModelDictMixinWithExtras): # to represent null, as defined in NULL_DOMAIN_ID above. def to_dict(self, include_extra_dict=False): - d = super(Project, self).to_dict( - include_extra_dict=include_extra_dict) + d = super(Project, self).to_dict(include_extra_dict=include_extra_dict) if d['domain_id'] == base.NULL_DOMAIN_ID: d['domain_id'] = None # NOTE(notmorgan): Eventually it may make sense to drop the empty @@ -55,26 +54,36 @@ class Project(sql.ModelBase, sql.ModelDictMixinWithExtras): return project_obj __tablename__ = 'project' - attributes = ['id', 'name', 'domain_id', 'description', 'enabled', - 'parent_id', 'is_domain', 'tags'] + attributes = [ + 'id', + 'name', + 'domain_id', + 'description', + 'enabled', + 'parent_id', + 'is_domain', + 'tags', + ] resource_options_registry = ro.PROJECT_OPTIONS_REGISTRY id = sql.Column(sql.String(64), primary_key=True) name = sql.Column(sql.String(64), nullable=False) - domain_id = sql.Column(sql.String(64), sql.ForeignKey('project.id'), - nullable=False) + domain_id = sql.Column( + sql.String(64), sql.ForeignKey('project.id'), nullable=False + ) description = sql.Column(sql.Text()) enabled = sql.Column(sql.Boolean) extra = sql.Column(sql.JsonBlob()) parent_id = sql.Column(sql.String(64), sql.ForeignKey('project.id')) - is_domain = sql.Column(sql.Boolean, default=False, nullable=False, - server_default='0') + is_domain = sql.Column( + sql.Boolean, default=False, nullable=False, server_default='0' + ) _tags = orm.relationship( 'ProjectTag', single_parent=True, lazy='subquery', cascade='all,delete-orphan', backref='project', - primaryjoin='and_(ProjectTag.project_id==Project.id)' + primaryjoin='and_(ProjectTag.project_id==Project.id)', ) _resource_option_mapper = orm.relationship( 'ProjectOption', @@ -82,7 +91,7 @@ class Project(sql.ModelBase, sql.ModelDictMixinWithExtras): cascade='all,delete,delete-orphan', lazy='subquery', backref='project', - collection_class=collections.attribute_mapped_collection('option_id') + 
collection_class=collections.attribute_mapped_collection('option_id'), ) # Unique constraint across two columns to create the separation @@ -115,18 +124,23 @@ class ProjectTag(sql.ModelBase, sql.ModelDictMixin): __tablename__ = 'project_tag' attributes = ['project_id', 'name'] project_id = sql.Column( - sql.String(64), sql.ForeignKey('project.id', ondelete='CASCADE'), - nullable=False, primary_key=True) + sql.String(64), + sql.ForeignKey('project.id', ondelete='CASCADE'), + nullable=False, + primary_key=True, + ) name = sql.Column(sql.Unicode(255), nullable=False, primary_key=True) class ProjectOption(sql.ModelBase): __tablename__ = 'project_option' - project_id = sql.Column(sql.String(64), - sql.ForeignKey('project.id', ondelete='CASCADE'), - nullable=False, primary_key=True) - option_id = sql.Column(sql.String(4), nullable=False, - primary_key=True) + project_id = sql.Column( + sql.String(64), + sql.ForeignKey('project.id', ondelete='CASCADE'), + nullable=False, + primary_key=True, + ) + option_id = sql.Column(sql.String(4), nullable=False, primary_key=True) option_value = sql.Column(sql.JsonBlob, nullable=True) def __init__(self, option_id, option_value): diff --git a/keystone/resource/config_backends/base.py b/keystone/resource/config_backends/base.py index bc256d219c..f3225880ef 100644 --- a/keystone/resource/config_backends/base.py +++ b/keystone/resource/config_backends/base.py @@ -64,8 +64,9 @@ class DomainConfigDriverBase(object, metaclass=abc.ABCMeta): raise exception.NotImplemented() # pragma: no cover @abc.abstractmethod - def list_config_options(self, domain_id, group=None, option=False, - sensitive=False): + def list_config_options( + self, domain_id, group=None, option=False, sensitive=False + ): """Get a config options for a domain. 
:param domain_id: the domain for this option diff --git a/keystone/resource/config_backends/sql.py b/keystone/resource/config_backends/sql.py index 02265c00dc..3e8c20a297 100644 --- a/keystone/resource/config_backends/sql.py +++ b/keystone/resource/config_backends/sql.py @@ -57,10 +57,12 @@ class DomainConfig(base.DomainConfigDriverBase): return WhiteListedConfig def _create_config_option( - self, session, domain_id, group, option, sensitive, value): + self, session, domain_id, group, option, sensitive, value + ): config_table = self.choose_table(sensitive) - ref = config_table(domain_id=domain_id, group=group, option=option, - value=value) + ref = config_table( + domain_id=domain_id, group=group, option=option, value=value + ) session.add(ref) def create_config_options(self, domain_id, option_list): @@ -71,30 +73,42 @@ class DomainConfig(base.DomainConfigDriverBase): query.delete(False) for option in option_list: self._create_config_option( - session, domain_id, option['group'], - option['option'], option['sensitive'], option['value']) + session, + domain_id, + option['group'], + option['option'], + option['sensitive'], + option['value'], + ) def _get_config_option(self, session, domain_id, group, option, sensitive): try: config_table = self.choose_table(sensitive) - ref = (session.query(config_table). 
- filter_by(domain_id=domain_id, group=group, - option=option).one()) + ref = ( + session.query(config_table) + .filter_by(domain_id=domain_id, group=group, option=option) + .one() + ) except sql.NotFound: msg = _('option %(option)s in group %(group)s') % { - 'group': group, 'option': option} + 'group': group, + 'option': option, + } raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) + domain_id=domain_id, group_or_option=msg + ) return ref def get_config_option(self, domain_id, group, option, sensitive=False): with sql.session_for_read() as session: - ref = self._get_config_option(session, domain_id, group, option, - sensitive) + ref = self._get_config_option( + session, domain_id, group, option, sensitive + ) return ref.to_dict() - def list_config_options(self, domain_id, group=None, option=None, - sensitive=False): + def list_config_options( + self, domain_id, group=None, option=None, sensitive=False + ): with sql.session_for_read() as session: config_table = self.choose_table(sensitive) query = session.query(config_table) @@ -109,10 +123,16 @@ class DomainConfig(base.DomainConfigDriverBase): with sql.session_for_write() as session: for option in option_list: self._delete_config_options( - session, domain_id, option['group'], option['option']) + session, domain_id, option['group'], option['option'] + ) self._create_config_option( - session, domain_id, option['group'], option['option'], - option['sensitive'], option['value']) + session, + domain_id, + option['group'], + option['option'], + option['sensitive'], + option['value'], + ) def _delete_config_options(self, session, domain_id, group, option): for config_table in [WhiteListedConfig, SensitiveConfig]: diff --git a/keystone/resource/core.py b/keystone/resource/core.py index 659540af05..00b9e7f6f0 100644 --- a/keystone/resource/core.py +++ b/keystone/resource/core.py @@ -79,7 +79,8 @@ class Manager(manager.Manager): max_depth = min(max_depth, limit_model.MAX_PROJECT_TREE_DEPTH + 
1) if self._get_hierarchy_depth(parents_list) > max_depth: raise exception.ForbiddenNotSecurity( - _('Max hierarchy depth reached for %s branch.') % project_id) + _('Max hierarchy depth reached for %s branch.') % project_id + ) def _assert_is_domain_project_constraints(self, project_ref): """Enforce specific constraints of projects that act as domains. @@ -93,18 +94,23 @@ class Manager(manager.Manager): :raises keystone.exception.ValidationError: If one of the constraints was not satisfied. """ - if (not PROVIDERS.identity_api.multiple_domains_supported and - project_ref['id'] != CONF.identity.default_domain_id and - project_ref['id'] != base.NULL_DOMAIN_ID): + if ( + not PROVIDERS.identity_api.multiple_domains_supported + and project_ref['id'] != CONF.identity.default_domain_id + and project_ref['id'] != base.NULL_DOMAIN_ID + ): raise exception.ValidationError( - message=_('Multiple domains are not supported')) + message=_('Multiple domains are not supported') + ) self.assert_domain_not_federated(project_ref['id'], project_ref) if project_ref['parent_id']: raise exception.ValidationError( - message=_('only root projects are allowed to act as ' - 'domains.')) + message=_( + 'only root projects are allowed to act as ' 'domains.' + ) + ) def _assert_regular_project_constraints(self, project_ref): """Enforce regular project hierarchy constraints. @@ -126,22 +132,32 @@ class Manager(manager.Manager): if parent_ref['is_domain']: if parent_ref['id'] != domain['id']: raise exception.ValidationError( - message=_('Cannot create project, the parent ' - '(%(parent_id)s) is acting as a domain, ' - 'but this project\'s domain id (%(domain_id)s) ' - 'does not match the parent\'s id.') - % {'parent_id': parent_ref['id'], - 'domain_id': domain['id']}) + message=_( + 'Cannot create project, the parent ' + '(%(parent_id)s) is acting as a domain, ' + 'but this project\'s domain id (%(domain_id)s) ' + 'does not match the parent\'s id.' 
+ ) + % { + 'parent_id': parent_ref['id'], + 'domain_id': domain['id'], + } + ) else: parent_domain_id = parent_ref.get('domain_id') if parent_domain_id != domain['id']: raise exception.ValidationError( - message=_('Cannot create project, since it specifies ' - 'its domain_id %(domain_id)s, but ' - 'specifies a parent in a different domain ' - '(%(parent_domain_id)s).') - % {'domain_id': domain['id'], - 'parent_domain_id': parent_domain_id}) + message=_( + 'Cannot create project, since it specifies ' + 'its domain_id %(domain_id)s, but ' + 'specifies a parent in a different domain ' + '(%(parent_domain_id)s).' + ) + % { + 'domain_id': domain['id'], + 'parent_domain_id': parent_domain_id, + } + ) def _enforce_project_constraints(self, project_ref): if project_ref.get('is_domain'): @@ -156,41 +172,61 @@ class Manager(manager.Manager): for ref in parents_list: if not ref.get('enabled', True): raise exception.ValidationError( - message=_('cannot create a project in a ' - 'branch containing a disabled ' - 'project: %s') % ref['id']) + message=_( + 'cannot create a project in a ' + 'branch containing a disabled ' + 'project: %s' + ) + % ref['id'] + ) - self._assert_max_hierarchy_depth(project_ref.get('parent_id'), - parents_list) + self._assert_max_hierarchy_depth( + project_ref.get('parent_id'), parents_list + ) def _raise_reserved_character_exception(self, entity_type, name): - msg = _('%(entity)s name cannot contain the following reserved ' - 'characters: %(chars)s') + msg = _( + '%(entity)s name cannot contain the following reserved ' + 'characters: %(chars)s' + ) raise exception.ValidationError( - message=msg % { + message=msg + % { 'entity': entity_type, - 'chars': utils.list_url_unsafe_chars(name) - }) + 'chars': utils.list_url_unsafe_chars(name), + } + ) def _generate_project_name_conflict_msg(self, project): if project['is_domain']: - return _('it is not permitted to have two projects ' - 'acting as domains with the same name: %s' - ) % project['name'] + return 
( + _( + 'it is not permitted to have two projects ' + 'acting as domains with the same name: %s' + ) + % project['name'] + ) else: - return _('it is not permitted to have two projects ' - 'with either the same name or same id in ' - 'the same domain: ' - 'name is %(name)s, project id %(id)s' - ) % project + return ( + _( + 'it is not permitted to have two projects ' + 'with either the same name or same id in ' + 'the same domain: ' + 'name is %(name)s, project id %(id)s' + ) + % project + ) def create_project(self, project_id, project, initiator=None): project = project.copy() - if (CONF.resource.project_name_url_safe != 'off' and - utils.is_not_url_safe(project['name'])): - self._raise_reserved_character_exception('Project', - project['name']) + if ( + CONF.resource.project_name_url_safe != 'off' + and utils.is_not_url_safe(project['name']) + ): + self._raise_reserved_character_exception( + 'Project', project['name'] + ) project.setdefault('enabled', True) project['name'] = project['name'].strip() @@ -217,7 +253,8 @@ class Manager(manager.Manager): except exception.Conflict: raise exception.Conflict( type='project', - details=self._generate_project_name_conflict_msg(project)) + details=self._generate_project_name_conflict_msg(project), + ) if project.get('is_domain'): notifications.Audit.created(self._DOMAIN, project_id, initiator) @@ -225,8 +262,9 @@ class Manager(manager.Manager): notifications.Audit.created(self._PROJECT, project_id, initiator) if MEMOIZE.should_cache(ret): self.get_project.set(ret, self, project_id) - self.get_project_by_name.set(ret, self, ret['name'], - ret['domain_id']) + self.get_project_by_name.set( + ret, self, ret['name'], ret['domain_id'] + ) assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() @@ -256,12 +294,12 @@ class Manager(manager.Manager): # NOTE(marek-denis): We cannot create this attribute in the __init__ as # config values are always initialized to default value. 
federated_domain = CONF.federation.federated_domain_name.lower() - if (domain.get('name') and domain['name'].lower() == federated_domain): - raise AssertionError(_('Domain cannot be named %s') - % domain['name']) - if (domain_id.lower() == federated_domain): - raise AssertionError(_('Domain cannot have ID %s') - % domain_id) + if domain.get('name') and domain['name'].lower() == federated_domain: + raise AssertionError( + _('Domain cannot be named %s') % domain['name'] + ) + if domain_id.lower() == federated_domain: + raise AssertionError(_('Domain cannot have ID %s') % domain_id) def assert_project_enabled(self, project_id, project=None): """Assert the project is enabled and its associated domain is enabled. @@ -282,21 +320,27 @@ class Manager(manager.Manager): for project in parents_list: if not project.get('enabled', True): raise exception.ForbiddenNotSecurity( - _('Cannot enable project %s since it has disabled ' - 'parents') % project_id) + _( + 'Cannot enable project %s since it has disabled ' + 'parents' + ) + % project_id + ) def _is_immutable(self, project_ref): return project_ref['options'].get( - ro_opt.IMMUTABLE_OPT.option_name, False) + ro_opt.IMMUTABLE_OPT.option_name, False + ) def _check_whole_subtree_is_disabled(self, project_id, subtree_list=None): if not subtree_list: subtree_list = self.list_projects_in_subtree(project_id) subtree_enabled = [ref.get('enabled', True) for ref in subtree_list] - return (not any(subtree_enabled)) + return not any(subtree_enabled) - def _update_project(self, project_id, project, initiator=None, - cascade=False): + def _update_project( + self, project_id, project, initiator=None, cascade=False + ): # Use the driver directly to prevent using old cached value. 
original_project = self.driver.get_project(project_id) project = project.copy() @@ -308,7 +352,8 @@ class Manager(manager.Manager): original_resource_ref=original_project, new_resource_ref=project, type='domain', - resource_id=project_id) + resource_id=project_id, + ) domain = self._get_domain_from_project(original_project) self.assert_domain_not_federated(project_id, domain) url_safe_option = CONF.resource.domain_name_url_safe @@ -319,27 +364,37 @@ class Manager(manager.Manager): original_resource_ref=original_project, new_resource_ref=project, type='project', - resource_id=project_id) + resource_id=project_id, + ) url_safe_option = CONF.resource.project_name_url_safe exception_entity = 'Project' - project_name_changed = ('name' in project and project['name'] != - original_project['name']) - if (url_safe_option != 'off' and project_name_changed and - utils.is_not_url_safe(project['name'])): - self._raise_reserved_character_exception(exception_entity, - project['name']) + project_name_changed = ( + 'name' in project and project['name'] != original_project['name'] + ) + if ( + url_safe_option != 'off' + and project_name_changed + and utils.is_not_url_safe(project['name']) + ): + self._raise_reserved_character_exception( + exception_entity, project['name'] + ) elif project_name_changed: project['name'] = project['name'].strip() parent_id = original_project.get('parent_id') if 'parent_id' in project and project.get('parent_id') != parent_id: raise exception.ForbiddenNotSecurity( - _('Update of `parent_id` is not allowed.')) + _('Update of `parent_id` is not allowed.') + ) - if ('is_domain' in project and - project['is_domain'] != original_project['is_domain']): + if ( + 'is_domain' in project + and project['is_domain'] != original_project['is_domain'] + ): raise exception.ValidationError( - message=_('Update of `is_domain` is not allowed.')) + message=_('Update of `is_domain` is not allowed.') + ) original_project_enabled = original_project.get('enabled', True) 
project_enabled = project.get('enabled', True) @@ -352,40 +407,53 @@ class Manager(manager.Manager): # project acting as a domain to be disabled irrespective of the # state of its children. Disabling a project acting as domain # effectively disables its children. - if (not original_project.get('is_domain') and not cascade and not - self._check_whole_subtree_is_disabled(project_id)): + if ( + not original_project.get('is_domain') + and not cascade + and not self._check_whole_subtree_is_disabled(project_id) + ): raise exception.ForbiddenNotSecurity( - _('Cannot disable project %(project_id)s since its ' - 'subtree contains enabled projects.') - % {'project_id': project_id}) + _( + 'Cannot disable project %(project_id)s since its ' + 'subtree contains enabled projects.' + ) + % {'project_id': project_id} + ) - notifications.Audit.disabled(self._PROJECT, project_id, - public=False) + notifications.Audit.disabled( + self._PROJECT, project_id, public=False + ) # Drop the computed assignments if the project is being disabled. # This ensures an accurate list of projects is returned when # listing projects/domains for a user based on role assignments. 
assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() if cascade: - self._only_allow_enabled_to_update_cascade(project, - original_project) + self._only_allow_enabled_to_update_cascade( + project, original_project + ) self._update_project_enabled_cascade(project_id, project_enabled) try: - project['is_domain'] = (project.get('is_domain') or - original_project['is_domain']) + project['is_domain'] = ( + project.get('is_domain') or original_project['is_domain'] + ) ret = self.driver.update_project(project_id, project) except exception.Conflict: raise exception.Conflict( type='project', - details=self._generate_project_name_conflict_msg(project)) + details=self._generate_project_name_conflict_msg(project), + ) try: self.get_project.invalidate(self, project_id) - self.get_project_by_name.invalidate(self, original_project['name'], - original_project['domain_id']) - if ('domain_id' in project and - project['domain_id'] != original_project['domain_id']): + self.get_project_by_name.invalidate( + self, original_project['name'], original_project['domain_id'] + ) + if ( + 'domain_id' in project + and project['domain_id'] != original_project['domain_id'] + ): # If the project's domain_id has been updated, invalidate user # role assignments cache region, as it may be caching inherited # assignments from the old domain to the specified project @@ -394,8 +462,9 @@ class Manager(manager.Manager): # attempt to send audit event even if the cache invalidation raises notifications.Audit.updated(self._PROJECT, project_id, initiator) if original_project['is_domain']: - notifications.Audit.updated(self._DOMAIN, project_id, - initiator) + notifications.Audit.updated( + self._DOMAIN, project_id, initiator + ) # If the domain is being disabled, issue the disable # notification as well if original_project_enabled and not project_enabled: @@ -408,8 +477,9 @@ class Manager(manager.Manager): # requiring the authorization context to be rebuilt the # next time they're validated. 
token_provider.TOKENS_REGION.invalidate() - notifications.Audit.disabled(self._DOMAIN, project_id, - public=False) + notifications.Audit.disabled( + self._DOMAIN, project_id, public=False + ) return ret @@ -418,14 +488,18 @@ class Manager(manager.Manager): if attr != 'enabled': if project.get(attr) != original_project.get(attr): raise exception.ValidationError( - message=_('Cascade update is only allowed for ' - 'enabled attribute.')) + message=_( + 'Cascade update is only allowed for ' + 'enabled attribute.' + ) + ) def _update_project_enabled_cascade(self, project_id, enabled): subtree = self.list_projects_in_subtree(project_id) # Update enabled only if different from original value - subtree_to_update = [child for child in subtree - if child['enabled'] != enabled] + subtree_to_update = [ + child for child in subtree if child['enabled'] != enabled + ] for child in subtree_to_update: child['enabled'] = enabled @@ -433,13 +507,15 @@ class Manager(manager.Manager): # Does not in fact disable the project, only emits a # notification that it was disabled. The actual disablement # is done in the next line. 
- notifications.Audit.disabled(self._PROJECT, child['id'], - public=False) + notifications.Audit.disabled( + self._PROJECT, child['id'], public=False + ) self.driver.update_project(child['id'], child) - def update_project(self, project_id, project, initiator=None, - cascade=False): + def update_project( + self, project_id, project, initiator=None, cascade=False + ): ret = self._update_project(project_id, project, initiator, cascade) if ret['is_domain']: self.get_domain.invalidate(self, project_id) @@ -447,12 +523,14 @@ class Manager(manager.Manager): return ret - def _post_delete_cleanup_project(self, project_id, project, - initiator=None): + def _post_delete_cleanup_project( + self, project_id, project, initiator=None + ): try: self.get_project.invalidate(self, project_id) - self.get_project_by_name.invalidate(self, project['name'], - project['domain_id']) + self.get_project_by_name.invalidate( + self, project['name'], project['domain_id'] + ) PROVIDERS.assignment_api.delete_project_assignments(project_id) # Invalidate user role assignments cache region, as it may # be caching role assignments where the target is @@ -486,20 +564,27 @@ class Manager(manager.Manager): ro_opt.check_immutable_delete( resource_ref=project, resource_type='project', - resource_id=project['id']) + resource_id=project['id'], + ) project_id = project['id'] if project['is_domain'] and project['enabled']: raise exception.ValidationError( - message=_('cannot delete an enabled project acting as a ' - 'domain. Please disable the project %s first.') - % project.get('id')) + message=_( + 'cannot delete an enabled project acting as a ' + 'domain. Please disable the project %s first.' + ) + % project.get('id') + ) if not self.is_leaf_project(project_id) and not cascade: raise exception.ForbiddenNotSecurity( - _('Cannot delete the project %s since it is not a leaf in the ' - 'hierarchy. 
Use the cascade option if you want to delete a ' - 'whole subtree.') - % project_id) + _( + 'Cannot delete the project %s since it is not a leaf in the ' + 'hierarchy. Use the cascade option if you want to delete a ' + 'whole subtree.' + ) + % project_id + ) if cascade: # Getting reversed project's subtrees list, i.e. from the leaves @@ -507,11 +592,15 @@ class Manager(manager.Manager): subtree_list = self.list_projects_in_subtree(project_id) subtree_list.reverse() if not self._check_whole_subtree_is_disabled( - project_id, subtree_list=subtree_list): + project_id, subtree_list=subtree_list + ): raise exception.ForbiddenNotSecurity( - _('Cannot delete project %(project_id)s since its subtree ' - 'contains enabled projects.') - % {'project_id': project_id}) + _( + 'Cannot delete project %(project_id)s since its subtree ' + 'contains enabled projects.' + ) + % {'project_id': project_id} + ) project_list = subtree_list + [project] projects_ids = [x['id'] for x in project_list] @@ -538,8 +627,9 @@ class Manager(manager.Manager): ) user_projects_ids = set([proj['id'] for proj in user_projects]) # Keep only the projects present in user_projects - return [proj for proj in projects_list - if proj['id'] in user_projects_ids] + return [ + proj for proj in projects_list if proj['id'] in user_projects_ids + ] def _assert_valid_project_id(self, project_id): if project_id is None: @@ -560,8 +650,9 @@ class Manager(manager.Manager): limits = PROVIDERS.unified_limit_api.list_limits(hints) project['limits'] = limits - def list_project_parents(self, project_id, user_id=None, - include_limits=False): + def list_project_parents( + self, project_id, user_id=None, include_limits=False + ): self._assert_valid_project_id(project_id) parents = self.driver.list_project_parents(project_id) # If a user_id was provided, the returned list should be filtered @@ -612,11 +703,13 @@ class Manager(manager.Manager): """ parents_list = self.list_project_parents(project['id']) parents_as_ids = 
self._build_parents_as_ids_dict( - project, {proj['id']: proj for proj in parents_list}) + project, {proj['id']: proj for proj in parents_list} + ) return parents_as_ids - def list_projects_in_subtree(self, project_id, user_id=None, - include_limits=False): + def list_projects_in_subtree( + self, project_id, user_id=None, include_limits=False + ): self._assert_valid_project_id(project_id) subtree = self.driver.list_projects_in_subtree(project_id) # If a user_id was provided, the returned list should be filtered @@ -641,7 +734,8 @@ class Manager(manager.Manager): children_ids = {} for child in children: children_ids[child['id']] = traverse_subtree_hierarchy( - child['id']) + child['id'] + ) return children_ids return traverse_subtree_hierarchy(project_id) @@ -670,6 +764,7 @@ class Manager(manager.Manager): } """ + def _projects_indexed_by_parent(projects_list): projects_by_parent = {} for proj in projects_list: @@ -683,7 +778,8 @@ class Manager(manager.Manager): subtree_list = self.list_projects_in_subtree(project_id) subtree_as_ids = self._build_subtree_as_ids_dict( - project_id, _projects_indexed_by_parent(subtree_list)) + project_id, _projects_indexed_by_parent(subtree_list) + ) return subtree_as_ids def list_domains_from_ids(self, domain_ids): @@ -700,8 +796,9 @@ class Manager(manager.Manager): # Retrieve the projects acting as domains get their correspondent # domains projects = self.list_projects_from_ids(domain_ids) - domains = [self._get_domain_from_project(project) - for project in projects] + domains = [ + self._get_domain_from_project(project) for project in projects + ] return domains @@ -724,8 +821,9 @@ class Manager(manager.Manager): def get_domain_by_name(self, domain_name): try: # Retrieve the corresponding project that acts as a domain - project = self.driver.get_project_by_name(domain_name, - domain_id=None) + project = self.driver.get_project_by_name( + domain_name, domain_id=None + ) except exception.ProjectNotFound: raise 
exception.DomainNotFound(domain_id=domain_name) @@ -739,12 +837,16 @@ class Manager(manager.Manager): result can be returned in response to a domain API call. """ if not project_ref['is_domain']: - LOG.error('Asked to convert a non-domain project into a ' - 'domain - Domain: %(domain_id)s, Project ID: ' - '%(id)s, Project Name: %(project_name)s', - {'domain_id': project_ref['domain_id'], - 'id': project_ref['id'], - 'project_name': project_ref['name']}) + LOG.error( + 'Asked to convert a non-domain project into a ' + 'domain - Domain: %(domain_id)s, Project ID: ' + '%(id)s, Project Name: %(project_name)s', + { + 'domain_id': project_ref['domain_id'], + 'id': project_ref['id'], + 'project_name': project_ref['name'], + }, + ) raise exception.DomainNotFound(domain_id=project_ref['id']) domain_ref = project_ref.copy() @@ -758,20 +860,24 @@ class Manager(manager.Manager): return domain_ref def create_domain(self, domain_id, domain, initiator=None): - if (CONF.resource.domain_name_url_safe != 'off' and - utils.is_not_url_safe(domain['name'])): + if ( + CONF.resource.domain_name_url_safe != 'off' + and utils.is_not_url_safe(domain['name']) + ): self._raise_reserved_character_exception('Domain', domain['name']) project_from_domain = base.get_project_from_domain(domain) is_domain_project = self.create_project( - domain_id, project_from_domain, initiator) + domain_id, project_from_domain, initiator + ) return self._get_domain_from_project(is_domain_project) @manager.response_truncated def list_domains(self, hints=None): projects = self.list_projects_acting_as_domain(hints) - domains = [self._get_domain_from_project(project) - for project in projects] + domains = [ + self._get_domain_from_project(project) for project in projects + ] return domains def update_domain(self, domain_id, domain, initiator=None): @@ -806,21 +912,23 @@ class Manager(manager.Manager): ro_opt.check_immutable_delete( resource_ref=domain, resource_type='domain', - resource_id=domain['id']) + 
resource_id=domain['id'], + ) # To help avoid inadvertent deletes, we insist that the domain # has been previously disabled. This also prevents a user deleting # their own domain since, once it is disabled, they won't be able # to get a valid token to issue this delete. if domain['enabled']: raise exception.ForbiddenNotSecurity( - _('Cannot delete a domain that is enabled, please disable it ' - 'first.')) + _( + 'Cannot delete a domain that is enabled, please disable it ' + 'first.' + ) + ) domain_id = domain['id'] self._delete_domain_contents(domain_id) - notifications.Audit.internal( - notifications.DOMAIN_DELETED, domain_id - ) + notifications.Audit.internal(notifications.DOMAIN_DELETED, domain_id) self._delete_project(domain, initiator) try: self.get_domain.invalidate(self, domain_id) @@ -842,27 +950,36 @@ class Manager(manager.Manager): associated with them as well as revoking any relevant tokens. """ + def _delete_projects(project, projects, examined): if project['id'] in examined: - msg = ('Circular reference or a repeated entry found ' - 'projects hierarchy - %(project_id)s.') + msg = ( + 'Circular reference or a repeated entry found ' + 'projects hierarchy - %(project_id)s.' + ) LOG.error(msg, {'project_id': project['id']}) return examined.add(project['id']) - children = [proj for proj in projects - if proj.get('parent_id') == project['id']] + children = [ + proj + for proj in projects + if proj.get('parent_id') == project['id'] + ] for proj in children: _delete_projects(proj, projects, examined) try: self._delete_project(project, initiator=None) except exception.ProjectNotFound: - LOG.debug(('Project %(projectid)s not found when ' - 'deleting domain contents for %(domainid)s, ' - 'continuing with cleanup.'), - {'projectid': project['id'], - 'domainid': domain_id}) + LOG.debug( + ( + 'Project %(projectid)s not found when ' + 'deleting domain contents for %(domainid)s, ' + 'continuing with cleanup.' 
+ ), + {'projectid': project['id'], 'domainid': domain_id}, + ) proj_refs = self.list_projects_in_domain(domain_id) @@ -896,7 +1013,8 @@ class Manager(manager.Manager): def list_projects_acting_as_domain(self, hints=None): return self.driver.list_projects_acting_as_domain( - hints or driver_hints.Hints()) + hints or driver_hints.Hints() + ) @MEMOIZE def get_project(self, project_id): @@ -936,13 +1054,15 @@ class Manager(manager.Manager): message=_( 'Cannot create project tags for %(project_id)s, project ' 'is immutable. Set "immutable" option to false before ' - 'creating project tags.') % {'project_id': project_id}) + 'creating project tags.' + ) + % {'project_id': project_id} + ) tag_name = tag.strip() project['tags'].append(tag_name) self.update_project(project_id, {'tags': project['tags']}) - notifications.Audit.created( - self._PROJECT_TAG, tag_name, initiator) + notifications.Audit.created(self._PROJECT_TAG, tag_name, initiator) return tag_name def get_project_tag(self, project_id, tag_name): @@ -984,7 +1104,10 @@ class Manager(manager.Manager): message=_( 'Cannot update project tags for %(project_id)s, project ' 'is immutable. Set "immutable" option to false before ' - 'creating project tags.') % {'project_id': project_id}) + 'creating project tags.' + ) + % {'project_id': project_id} + ) tag_list = [t.strip() for t in tags] project = {'tags': tag_list} self.update_project(project_id, project) @@ -1005,7 +1128,10 @@ class Manager(manager.Manager): message=_( 'Cannot delete project tags for %(project_id)s, project ' 'is immutable. Set "immutable" option to false before ' - 'creating project tags.') % {'project_id': project_id}) + 'creating project tags.' 
+ ) + % {'project_id': project_id} + ) try: project['tags'].remove(tag) except ValueError: @@ -1018,8 +1144,9 @@ class Manager(manager.Manager): if max_depth: exceeded_project_ids = self.driver.check_project_depth(max_depth) if exceeded_project_ids: - raise exception.LimitTreeExceedError(exceeded_project_ids, - max_depth) + raise exception.LimitTreeExceedError( + exceeded_project_ids, max_depth + ) MEMOIZE_CONFIG = cache.get_memoization_decorator(group='domain_config') @@ -1048,31 +1175,58 @@ class DomainConfigManager(manager.Manager): whitelisted_options = { 'identity': ['driver', 'list_limit'], 'ldap': [ - 'url', 'user', 'suffix', 'query_scope', 'page_size', - 'alias_dereferencing', 'debug_level', 'chase_referrals', - 'user_tree_dn', 'user_filter', 'user_objectclass', - 'user_id_attribute', 'user_name_attribute', 'user_mail_attribute', - 'user_description_attribute', 'user_pass_attribute', - 'user_enabled_attribute', 'user_enabled_invert', - 'user_enabled_mask', 'user_enabled_default', - 'user_attribute_ignore', 'user_default_project_id_attribute', - 'user_enabled_emulation', 'user_enabled_emulation_dn', + 'url', + 'user', + 'suffix', + 'query_scope', + 'page_size', + 'alias_dereferencing', + 'debug_level', + 'chase_referrals', + 'user_tree_dn', + 'user_filter', + 'user_objectclass', + 'user_id_attribute', + 'user_name_attribute', + 'user_mail_attribute', + 'user_description_attribute', + 'user_pass_attribute', + 'user_enabled_attribute', + 'user_enabled_invert', + 'user_enabled_mask', + 'user_enabled_default', + 'user_attribute_ignore', + 'user_default_project_id_attribute', + 'user_enabled_emulation', + 'user_enabled_emulation_dn', 'user_enabled_emulation_use_group_config', - 'user_additional_attribute_mapping', 'group_tree_dn', - 'group_filter', 'group_objectclass', 'group_id_attribute', - 'group_name_attribute', 'group_members_are_ids', - 'group_member_attribute', 'group_desc_attribute', - 'group_attribute_ignore', 'group_additional_attribute_mapping', - 
'tls_cacertfile', 'tls_cacertdir', 'use_tls', 'tls_req_cert', - 'use_pool', 'pool_size', 'pool_retry_max', 'pool_retry_delay', - 'pool_connection_timeout', 'pool_connection_lifetime', - 'use_auth_pool', 'auth_pool_size', 'auth_pool_connection_lifetime' - ] - } - sensitive_options = { - 'identity': [], - 'ldap': ['password'] + 'user_additional_attribute_mapping', + 'group_tree_dn', + 'group_filter', + 'group_objectclass', + 'group_id_attribute', + 'group_name_attribute', + 'group_members_are_ids', + 'group_member_attribute', + 'group_desc_attribute', + 'group_attribute_ignore', + 'group_additional_attribute_mapping', + 'tls_cacertfile', + 'tls_cacertdir', + 'use_tls', + 'tls_req_cert', + 'use_pool', + 'pool_size', + 'pool_retry_max', + 'pool_retry_delay', + 'pool_connection_timeout', + 'pool_connection_lifetime', + 'use_auth_pool', + 'auth_pool_size', + 'auth_pool_connection_lifetime', + ], } + sensitive_options = {'identity': [], 'ldap': ['password']} def __init__(self): super(DomainConfigManager, self).__init__(CONF.domain_config.driver) @@ -1089,15 +1243,16 @@ class DomainConfigManager(manager.Manager): # Something must be defined in the request if not config: raise exception.InvalidDomainConfig( - reason=_('No options specified')) + reason=_('No options specified') + ) # Make sure the groups/options defined in config itself are valid for group in config: - if (not config[group] or not - isinstance(config[group], dict)): - msg = _('The value of group %(group)s specified in the ' - 'config should be a dictionary of options') % { - 'group': group} + if not config[group] or not isinstance(config[group], dict): + msg = _( + 'The value of group %(group)s specified in the ' + 'config should be a dictionary of options' + ) % {'group': group} raise exception.InvalidDomainConfig(reason=msg) for option in config[group]: self._assert_valid_group_and_option(group, option) @@ -1119,30 +1274,40 @@ class DomainConfigManager(manager.Manager): if not group and option: # Our API 
structure should prevent this from ever happening, so if # it does, then this is coding error. - msg = _('Option %(option)s found with no group specified while ' - 'checking domain configuration request') % { - 'option': option} + msg = _( + 'Option %(option)s found with no group specified while ' + 'checking domain configuration request' + ) % {'option': option} raise exception.UnexpectedError(exception=msg) if CONF.domain_config.additional_whitelisted_options: self.whitelisted_options.update( - **CONF.domain_config.additional_whitelisted_options) + **CONF.domain_config.additional_whitelisted_options + ) if CONF.domain_config.additional_sensitive_options: self.sensitive_options.update( - **CONF.domain_config.additional_sensitive_options) + **CONF.domain_config.additional_sensitive_options + ) - if (group and group not in self.whitelisted_options and - group not in self.sensitive_options): - msg = _('Group %(group)s is not supported ' - 'for domain specific configurations') % {'group': group} + if ( + group + and group not in self.whitelisted_options + and group not in self.sensitive_options + ): + msg = _( + 'Group %(group)s is not supported ' + 'for domain specific configurations' + ) % {'group': group} raise exception.InvalidDomainConfig(reason=msg) if option: - if (option not in self.whitelisted_options.get(group, {}) - and option not in self.sensitive_options.get(group, {})): - msg = _('Option %(option)s in group %(group)s is not ' - 'supported for domain specific configurations') % { - 'group': group, 'option': option} + if option not in self.whitelisted_options.get( + group, {} + ) and option not in self.sensitive_options.get(group, {}): + msg = _( + 'Option %(option)s in group %(group)s is not ' + 'supported for domain specific configurations' + ) % {'group': group, 'option': option} raise exception.InvalidDomainConfig(reason=msg) def _is_sensitive(self, group, option): @@ -1153,17 +1318,24 @@ class DomainConfigManager(manager.Manager): option_list = [] for 
group in config: for option in config[group]: - option_list.append({ - 'group': group, 'option': option, - 'value': config[group][option], - 'sensitive': self._is_sensitive(group, option)}) + option_list.append( + { + 'group': group, + 'option': option, + 'value': config[group][option], + 'sensitive': self._is_sensitive(group, option), + } + ) return option_list def _option_dict(self, group, option): group_attr = getattr(CONF, group) - return {'group': group, 'option': option, - 'value': getattr(group_attr, option)} + return { + 'group': group, + 'option': option, + 'value': getattr(group_attr, option), + } def _list_to_config(self, whitelisted, sensitive=None, req_option=None): """Build config dict from a list of option dicts. @@ -1189,14 +1361,22 @@ class DomainConfigManager(manager.Manager): # there is only one option in the answer (and that it's the right # one) - if not, something has gone wrong and we raise an error if len(the_list) > 1 or the_list[0]['option'] != req_option: - LOG.error('Unexpected results in response for domain ' - 'config - %(count)s responses, first option is ' - '%(option)s, expected option %(expected)s', - {'count': len(the_list), 'option': list[0]['option'], - 'expected': req_option}) + LOG.error( + 'Unexpected results in response for domain ' + 'config - %(count)s responses, first option is ' + '%(option)s, expected option %(expected)s', + { + 'count': len(the_list), + 'option': list[0]['option'], + 'expected': req_option, + }, + ) raise exception.UnexpectedError( - _('An unexpected error occurred when retrieving domain ' - 'configs')) + _( + 'An unexpected error occurred when retrieving domain ' + 'configs' + ) + ) return {the_list[0]['option']: the_list[0]['value']} config = {} @@ -1265,13 +1445,16 @@ class DomainConfigManager(manager.Manager): if option: msg = _('option %(option)s in group %(group)s') % { - 'group': group, 'option': option} + 'group': group, + 'option': option, + } elif group: msg = _('group %(group)s') % {'group': 
group} else: msg = _('any options') raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) + domain_id=domain_id, group_or_option=msg + ) def get_security_compliance_config(self, domain_id, group, option=None): r"""Get full or partial security compliance config from configuration. @@ -1299,17 +1482,21 @@ class DomainConfigManager(manager.Manager): """ if domain_id != CONF.identity.default_domain_id: - msg = _('Reading security compliance information for any domain ' - 'other than the default domain is not allowed or ' - 'supported.') + msg = _( + 'Reading security compliance information for any domain ' + 'other than the default domain is not allowed or ' + 'supported.' + ) raise exception.InvalidDomainConfig(reason=msg) config_list = [] readable_options = ['password_regex', 'password_regex_description'] if option and option not in readable_options: - msg = _('Reading security compliance values other than ' - 'password_regex and password_regex_description is not ' - 'allowed.') + msg = _( + 'Reading security compliance values other than ' + 'password_regex and password_regex_description is not ' + 'allowed.' 
+ ) raise exception.InvalidDomainConfig(reason=msg) elif option and option in readable_options: config_list.append(self._option_dict(group, option)) @@ -1345,6 +1532,7 @@ class DomainConfigManager(manager.Manager): support or one that does not exist in the original config """ + def _assert_valid_update(domain_id, config, group=None, option=None): """Ensure the combination of config, group and option is valid.""" self._assert_valid_config(config) @@ -1359,49 +1547,61 @@ class DomainConfigManager(manager.Manager): if group: if len(config) != 1 or (option and len(config[group]) != 1): if option: - msg = _('Trying to update option %(option)s in group ' - '%(group)s, so that, and only that, option ' - 'must be specified in the config') % { - 'group': group, 'option': option} + msg = _( + 'Trying to update option %(option)s in group ' + '%(group)s, so that, and only that, option ' + 'must be specified in the config' + ) % {'group': group, 'option': option} else: - msg = _('Trying to update group %(group)s, so that, ' - 'and only that, group must be specified in ' - 'the config') % {'group': group} + msg = _( + 'Trying to update group %(group)s, so that, ' + 'and only that, group must be specified in ' + 'the config' + ) % {'group': group} raise exception.InvalidDomainConfig(reason=msg) # So we now know we have the right number of entries in the # config that align with a group/option being specified, but we # must also make sure they match. 
if group not in config: - msg = _('request to update group %(group)s, but config ' - 'provided contains group %(group_other)s ' - 'instead') % { - 'group': group, - 'group_other': list(config.keys())[0]} + msg = _( + 'request to update group %(group)s, but config ' + 'provided contains group %(group_other)s ' + 'instead' + ) % {'group': group, 'group_other': list(config.keys())[0]} raise exception.InvalidDomainConfig(reason=msg) if option and option not in config[group]: - msg = _('Trying to update option %(option)s in group ' - '%(group)s, but config provided contains option ' - '%(option_other)s instead') % { - 'group': group, 'option': option, - 'option_other': list(config[group].keys())[0]} + msg = _( + 'Trying to update option %(option)s in group ' + '%(group)s, but config provided contains option ' + '%(option_other)s instead' + ) % { + 'group': group, + 'option': option, + 'option_other': list(config[group].keys())[0], + } raise exception.InvalidDomainConfig(reason=msg) # Finally, we need to check if the group/option specified # already exists in the original config - since if not, to keep # with the semantics of an update, we need to fail with # a DomainConfigNotFound - if not self._get_config_with_sensitive_info(domain_id, - group, option): + if not self._get_config_with_sensitive_info( + domain_id, group, option + ): if option: msg = _('option %(option)s in group %(group)s') % { - 'group': group, 'option': option} + 'group': group, + 'option': option, + } raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) + domain_id=domain_id, group_or_option=msg + ) else: msg = _('group %(group)s') % {'group': group} raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) + domain_id=domain_id, group_or_option=msg + ) update_config = config if group and option: @@ -1447,18 +1647,23 @@ class DomainConfigManager(manager.Manager): if not current_group: msg = _('group %(group)s') % {'group': group} raise 
exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) + domain_id=domain_id, group_or_option=msg + ) if option and not current_group.get(option): msg = _('option %(option)s in group %(group)s') % { - 'group': group, 'option': option} + 'group': group, + 'option': option, + } raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) + domain_id=domain_id, group_or_option=msg + ) self.delete_config_options(domain_id, group, option) self.get_config_with_sensitive_info.invalidate(self, domain_id) - def _get_config_with_sensitive_info(self, domain_id, group=None, - option=None): + def _get_config_with_sensitive_info( + self, domain_id, group=None, option=None + ): """Get config for a domain/group/option with sensitive info included. This is only used by the methods within this class, which may need to @@ -1466,8 +1671,9 @@ class DomainConfigManager(manager.Manager): """ whitelisted = self.list_config_options(domain_id, group, option) - sensitive = self.list_config_options(domain_id, group, option, - sensitive=True) + sensitive = self.list_config_options( + domain_id, group, option, sensitive=True + ) # Check if there are any sensitive substitutions needed. We first try # and simply ensure any sensitive options that have valid substitution @@ -1493,27 +1699,34 @@ class DomainConfigManager(manager.Manager): warning_msg = '' try: each_whitelisted['value'] = ( - each_whitelisted['value'] % sensitive_dict) + each_whitelisted['value'] % sensitive_dict + ) except KeyError: warning_msg = ( 'Found what looks like an unmatched config option ' 'substitution reference - domain: %(domain)s, group: ' '%(group)s, option: %(option)s, value: %(value)s. Perhaps ' 'the config option to which it refers has yet to be ' - 'added?') + 'added?' 
+ ) except (ValueError, TypeError): warning_msg = ( 'Found what looks like an incorrectly constructed ' 'config option substitution reference - domain: ' '%(domain)s, group: %(group)s, option: %(option)s, ' - 'value: %(value)s.') + 'value: %(value)s.' + ) if warning_msg: - LOG.warning(warning_msg, { - 'domain': domain_id, - 'group': each_whitelisted['group'], - 'option': each_whitelisted['option'], - 'value': original_value}) + LOG.warning( + warning_msg, + { + 'domain': domain_id, + 'group': each_whitelisted['group'], + 'option': each_whitelisted['option'], + 'value': original_value, + }, + ) return self._list_to_config(whitelisted, sensitive) @@ -1558,9 +1771,10 @@ class DomainConfigManager(manager.Manager): if group: if option: if option not in self.whitelisted_options[group]: - msg = _('Reading the default for option %(option)s in ' - 'group %(group)s is not supported') % { - 'option': option, 'group': group} + msg = _( + 'Reading the default for option %(option)s in ' + 'group %(group)s is not supported' + ) % {'option': option, 'group': group} raise exception.InvalidDomainConfig(reason=msg) config_list.append(self._option_dict(group, option)) else: diff --git a/keystone/resource/schema.py b/keystone/resource/schema.py index 404b4a89ed..687d75f027 100644 --- a/keystone/resource/schema.py +++ b/keystone/resource/schema.py @@ -18,7 +18,7 @@ _name_properties = { 'type': 'string', 'minLength': 1, 'maxLength': 64, - 'pattern': r'[\S]+' + 'pattern': r'[\S]+', } _project_tag_name_properties = { @@ -29,7 +29,7 @@ _project_tag_name_properties = { # guidelines as set by the API-WG, which matches anything that # does not contain a '/' or ','. 
# https://specs.openstack.org/openstack/api-wg/guidelines/tags.html - 'pattern': '^[^,/]*$' + 'pattern': '^[^,/]*$', } _project_tags_list_properties = { @@ -37,7 +37,7 @@ _project_tags_list_properties = { 'items': _project_tag_name_properties, 'required': [], 'maxItems': 80, - 'uniqueItems': True + 'uniqueItems': True, } _project_properties = { @@ -49,7 +49,7 @@ _project_properties = { 'parent_id': validation.nullable(parameter_types.id_string), 'name': _name_properties, 'tags': _project_tags_list_properties, - 'options': ro.PROJECT_OPTIONS_REGISTRY.json_schema + 'options': ro.PROJECT_OPTIONS_REGISTRY.json_schema, } # This is for updating a single project tag via the URL @@ -65,7 +65,7 @@ project_create = { # project creation according to the Identity V3 API. We should think # about using the maxProperties validator here, and in update. 'required': ['name'], - 'additionalProperties': True + 'additionalProperties': True, } project_update = { @@ -73,14 +73,14 @@ project_update = { 'properties': _project_properties, # NOTE(lbragstad): Make sure at least one property is being updated 'minProperties': 1, - 'additionalProperties': True + 'additionalProperties': True, } _domain_properties = { 'description': validation.nullable(parameter_types.description), 'enabled': parameter_types.boolean, 'name': _name_properties, - 'tags': project_tags_update + 'tags': project_tags_update, } domain_create = { @@ -90,12 +90,12 @@ domain_create = { # the current implementation in assignment.controller:DomainV3 requires a # name for the domain. 
'required': ['name'], - 'additionalProperties': True + 'additionalProperties': True, } domain_update = { 'type': 'object', 'properties': _domain_properties, 'minProperties': 1, - 'additionalProperties': True + 'additionalProperties': True, } diff --git a/keystone/revoke/backends/base.py b/keystone/revoke/backends/base.py index 52ee957dca..c100ca5936 100644 --- a/keystone/revoke/backends/base.py +++ b/keystone/revoke/backends/base.py @@ -26,7 +26,8 @@ CONF = keystone.conf.CONF def revoked_before_cutoff_time(): expire_delta = datetime.timedelta( - seconds=CONF.token.expiration + CONF.revoke.expiration_buffer) + seconds=CONF.token.expiration + CONF.revoke.expiration_buffer + ) oldest = timeutils.utcnow() - expire_delta return oldest diff --git a/keystone/revoke/backends/sql.py b/keystone/revoke/backends/sql.py index 28beea03e8..041faa583e 100644 --- a/keystone/revoke/backends/sql.py +++ b/keystone/revoke/backends/sql.py @@ -39,12 +39,21 @@ class RevocationEvent(sql.ModelBase, sql.ModelDictMixin): audit_id = sql.Column(sql.String(32)) audit_chain_id = sql.Column(sql.String(32)) __table_args__ = ( - sql.Index('ix_revocation_event_project_id_issued_before', 'project_id', - 'issued_before'), - sql.Index('ix_revocation_event_user_id_issued_before', 'user_id', - 'issued_before'), - sql.Index('ix_revocation_event_audit_id_issued_before', - 'audit_id', 'issued_before'), + sql.Index( + 'ix_revocation_event_project_id_issued_before', + 'project_id', + 'issued_before', + ), + sql.Index( + 'ix_revocation_event_user_id_issued_before', + 'user_id', + 'issued_before', + ), + sql.Index( + 'ix_revocation_event_audit_id_issued_before', + 'audit_id', + 'issued_before', + ), ) @@ -77,8 +86,9 @@ class Revoke(base.RevokeDriverBase): query = session.query(RevocationEvent.id) query = query.filter(RevocationEvent.revoked_at < oldest) query = query.limit(batch_size).subquery() - delete_query = (session.query(RevocationEvent). 
- filter(RevocationEvent.id.in_(query))) + delete_query = session.query(RevocationEvent).filter( + RevocationEvent.id.in_(query) + ) while True: rowcount = delete_query.delete(synchronize_session=False) if rowcount == 0: @@ -93,7 +103,8 @@ class Revoke(base.RevokeDriverBase): def _list_token_events(self, token): with sql.session_for_read() as session: query = session.query(RevocationEvent).filter( - RevocationEvent.issued_before >= token['issued_at']) + RevocationEvent.issued_before >= token['issued_at'] + ) user = [RevocationEvent.user_id.is_(None)] proj = [RevocationEvent.project_id.is_(None)] audit = [RevocationEvent.audit_id.is_(None)] @@ -110,17 +121,22 @@ class Revoke(base.RevokeDriverBase): audit.append(RevocationEvent.audit_id == token['audit_id']) if token['trust_id']: trust.append(RevocationEvent.trust_id == token['trust_id']) - query = query.filter(sqlalchemy.and_(sqlalchemy.or_(*user), - sqlalchemy.or_(*proj), - sqlalchemy.or_(*audit), - sqlalchemy.or_(*trust))) + query = query.filter( + sqlalchemy.and_( + sqlalchemy.or_(*user), + sqlalchemy.or_(*proj), + sqlalchemy.or_(*audit), + sqlalchemy.or_(*trust), + ) + ) events = [revoke_model.RevokeEvent(**e.to_dict()) for e in query] return events def _list_last_fetch_events(self, last_fetch=None): with sql.session_for_read() as session: query = session.query(RevocationEvent).order_by( - RevocationEvent.revoked_at) + RevocationEvent.revoked_at + ) if last_fetch: query = query.filter(RevocationEvent.revoked_at > last_fetch) diff --git a/keystone/revoke/core.py b/keystone/revoke/core.py index a236364c4d..45d67b5650 100644 --- a/keystone/revoke/core.py +++ b/keystone/revoke/core.py @@ -27,9 +27,7 @@ CONF = keystone.conf.CONF # return a filtered list based upon last fetchtime. This is deprecated but # must be maintained. 
REVOKE_REGION = cache.create_region(name='revoke') -MEMOIZE = cache.get_memoization_decorator( - group='revoke', - region=REVOKE_REGION) +MEMOIZE = cache.get_memoization_decorator(group='revoke', region=REVOKE_REGION) class Manager(manager.Manager): @@ -57,24 +55,23 @@ class Manager(manager.Manager): def list_events(self, last_fetch=None): return self._list_events(last_fetch) - def _user_callback(self, service, resource_type, operation, - payload): + def _user_callback(self, service, resource_type, operation, payload): self.revoke_by_user(payload['resource_info']) - def _project_callback(self, service, resource_type, operation, - payload): + def _project_callback(self, service, resource_type, operation, payload): self.revoke( - revoke_model.RevokeEvent(project_id=payload['resource_info'])) + revoke_model.RevokeEvent(project_id=payload['resource_info']) + ) - def _trust_callback(self, service, resource_type, operation, - payload): + def _trust_callback(self, service, resource_type, operation, payload): self.revoke( - revoke_model.RevokeEvent(trust_id=payload['resource_info'])) + revoke_model.RevokeEvent(trust_id=payload['resource_info']) + ) - def _consumer_callback(self, service, resource_type, operation, - payload): + def _consumer_callback(self, service, resource_type, operation, payload): self.revoke( - revoke_model.RevokeEvent(consumer_id=payload['resource_info'])) + revoke_model.RevokeEvent(consumer_id=payload['resource_info']) + ) def _register_listeners(self): callbacks = { @@ -84,43 +81,53 @@ class Manager(manager.Manager): ['user', self._user_callback], ['project', self._project_callback], ], - notifications.ACTIONS.disabled: [ - ['user', self._user_callback] - ], + notifications.ACTIONS.disabled: [['user', self._user_callback]], notifications.ACTIONS.internal: [ - [notifications.PERSIST_REVOCATION_EVENT_FOR_USER, - self._user_callback], - ] + [ + notifications.PERSIST_REVOCATION_EVENT_FOR_USER, + self._user_callback, + ], + ], } for event, cb_info in 
callbacks.items(): for resource_type, callback_fns in cb_info: - notifications.register_event_callback(event, resource_type, - callback_fns) + notifications.register_event_callback( + event, resource_type, callback_fns + ) def revoke_by_user(self, user_id): return self.revoke(revoke_model.RevokeEvent(user_id=user_id)) - def _assert_not_domain_and_project_scoped(self, domain_id=None, - project_id=None): + def _assert_not_domain_and_project_scoped( + self, domain_id=None, project_id=None + ): if domain_id is not None and project_id is not None: - msg = _('The revoke call must not have both domain_id and ' - 'project_id. This is a bug in the Keystone server. The ' - 'current request is aborted.') + msg = _( + 'The revoke call must not have both domain_id and ' + 'project_id. This is a bug in the Keystone server. The ' + 'current request is aborted.' + ) raise exception.UnexpectedError(exception=msg) def revoke_by_audit_id(self, audit_id): self.revoke(revoke_model.RevokeEvent(audit_id=audit_id)) - def revoke_by_audit_chain_id(self, audit_chain_id, project_id=None, - domain_id=None): + def revoke_by_audit_chain_id( + self, audit_chain_id, project_id=None, domain_id=None + ): - self._assert_not_domain_and_project_scoped(domain_id=domain_id, - project_id=project_id) + self._assert_not_domain_and_project_scoped( + domain_id=domain_id, project_id=project_id + ) - self.revoke(revoke_model.RevokeEvent(audit_chain_id=audit_chain_id, - domain_id=domain_id, - project_id=project_id)) + self.revoke( + revoke_model.RevokeEvent( + audit_chain_id=audit_chain_id, + domain_id=domain_id, + project_id=project_id, + ) + ) def check_token(self, token): """Check the values from a token against the revocation list. @@ -132,8 +139,9 @@ class Manager(manager.Manager): :raises keystone.exception.TokenNotFound: If the token is invalid. 
""" - if revoke_model.is_revoked(self.driver.list_events(token=token), - token): + if revoke_model.is_revoked( + self.driver.list_events(token=token), token + ): raise exception.TokenNotFound(_('Failed to validate token')) def revoke(self, event): diff --git a/keystone/server/__init__.py b/keystone/server/__init__.py index 33e13a9d40..96f27aefbf 100644 --- a/keystone/server/__init__.py +++ b/keystone/server/__init__.py @@ -1,4 +1,3 @@ - # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at @@ -23,14 +22,16 @@ CONF = keystone.conf.CONF LOG = log.getLogger(__name__) -def configure(version=None, config_files=None, - pre_setup_logging_fn=lambda: None): +def configure( + version=None, config_files=None, pre_setup_logging_fn=lambda: None +): keystone.conf.configure() sql.initialize() keystone.conf.set_config_defaults() - CONF(project='keystone', version=version, - default_config_files=config_files) + CONF( + project='keystone', version=version, default_config_files=config_files + ) pre_setup_logging_fn() keystone.conf.setup_logging() @@ -38,11 +39,13 @@ def configure(version=None, config_files=None, if CONF.insecure_debug: LOG.warning( 'insecure_debug is enabled so responses may include sensitive ' - 'information.') + 'information.' 
+ ) -def setup_backends(load_extra_backends_fn=lambda: {}, - startup_application_fn=lambda: None): +def setup_backends( + load_extra_backends_fn=lambda: {}, startup_application_fn=lambda: None +): drivers = backends.load_backends() drivers.update(load_extra_backends_fn()) res = startup_application_fn() diff --git a/keystone/server/backends.py b/keystone/server/backends.py index 04acfa540d..388ea867be 100644 --- a/keystone/server/backends.py +++ b/keystone/server/backends.py @@ -48,16 +48,29 @@ def load_backends(): cache.configure_cache(region=identity.ID_MAPPING_REGION) cache.configure_invalidation_region() - managers = [application_credential.Manager, assignment.Manager, - catalog.Manager, credential.Manager, - credential.provider.Manager, resource.DomainConfigManager, - endpoint_policy.Manager, federation.Manager, - identity.generator.Manager, identity.MappingManager, - identity.Manager, identity.ShadowUsersManager, - limit.Manager, oauth1.Manager, policy.Manager, - resource.Manager, revoke.Manager, assignment.RoleManager, - receipt.provider.Manager, trust.Manager, - token.provider.Manager] + managers = [ + application_credential.Manager, + assignment.Manager, + catalog.Manager, + credential.Manager, + credential.provider.Manager, + resource.DomainConfigManager, + endpoint_policy.Manager, + federation.Manager, + identity.generator.Manager, + identity.MappingManager, + identity.Manager, + identity.ShadowUsersManager, + limit.Manager, + oauth1.Manager, + policy.Manager, + resource.Manager, + revoke.Manager, + assignment.RoleManager, + receipt.provider.Manager, + trust.Manager, + token.provider.Manager, + ] drivers = {d._provides_api: d() for d in managers} diff --git a/keystone/server/flask/__init__.py b/keystone/server/flask/__init__.py index 9167a9c18c..1dae519328 100644 --- a/keystone/server/flask/__init__.py +++ b/keystone/server/flask/__init__.py @@ -27,6 +27,14 @@ from keystone.server.flask.common import unenforced_api # noqa # NOTE(morgan): This allows for 
from keystone.flask import * and have all the # cool stuff needed to develop new APIs within a module/subsystem -__all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap', - 'base_url', 'construct_json_home_data', - 'construct_resource_map', 'full_url', 'unenforced_api') +__all__ = ( + 'APIBase', + 'JsonHomeData', + 'ResourceBase', + 'ResourceMap', + 'base_url', + 'construct_json_home_data', + 'construct_resource_map', + 'full_url', + 'unenforced_api', +) diff --git a/keystone/server/flask/application.py b/keystone/server/flask/application.py index 7ab8fbf193..32ef2bc0ca 100644 --- a/keystone/server/flask/application.py +++ b/keystone/server/flask/application.py @@ -38,6 +38,7 @@ LOG = log.getLogger(__name__) def fail_gracefully(f): """Log exceptions and aborts.""" + @functools.wraps(f) def wrapper(*args, **kw): try: @@ -68,7 +69,8 @@ def _best_match_language(): if not flask.request.accept_languages: return None return flask.request.accept_languages.best_match( - oslo_i18n.get_available_languages('keystone')) + oslo_i18n.get_available_languages('keystone') + ) def _handle_keystone_exception(error): @@ -85,7 +87,8 @@ def _handle_keystone_exception(error): if isinstance(error, exception.Unauthorized): LOG.warning( "Authorization failed. 
%(exception)s from %(remote_addr)s", - {'exception': error, 'remote_addr': flask.request.remote_addr}) + {'exception': error, 'remote_addr': flask.request.remote_addr}, + ) else: LOG.exception(str(error)) @@ -98,10 +101,7 @@ def _handle_keystone_exception(error): message = str(message) body = dict( - error={ - 'code': error.code, - 'title': error.title, - 'message': message} + error={'code': error.code, 'title': error.title, 'message': message} ) if isinstance(error, exception.AuthPluginException): @@ -131,8 +131,10 @@ def _handle_unknown_keystone_exception(error): @fail_gracefully def application_factory(name='public'): if name not in ('admin', 'public'): - raise RuntimeError('Application name (for base_url lookup) must be ' - 'either `admin` or `public`.') + raise RuntimeError( + 'Application name (for base_url lookup) must be ' + 'either `admin` or `public`.' + ) app = flask.Flask(name) @@ -160,7 +162,8 @@ def application_factory(name='public'): # NOTE(morgan): Configure the Flask Environment for our needs. app.config.update( # We want to bubble up Flask Exceptions (for now) - PROPAGATE_EXCEPTIONS=True) + PROPAGATE_EXCEPTIONS=True + ) for api in keystone.api.__apis__: for api_bp in api.APIs: @@ -168,12 +171,13 @@ def application_factory(name='public'): # Load in Healthcheck and map it to /healthcheck hc_app = healthcheck.Healthcheck.app_factory( - {}, oslo_config_project='keystone') + {}, oslo_config_project='keystone' + ) # Use the simple form of the dispatch middleware, no extra logic needed # for legacy dispatching. 
This is to mount /healthcheck at a consistent # place app.wsgi_app = wsgi_dispatcher.DispatcherMiddleware( - app.wsgi_app, - {'/healthcheck': hc_app}) + app.wsgi_app, {'/healthcheck': hc_app} + ) return app diff --git a/keystone/server/flask/common.py b/keystone/server/flask/common.py index 3b99fb6c5f..9a70fc45ca 100644 --- a/keystone/server/flask/common.py +++ b/keystone/server/flask/common.py @@ -49,17 +49,25 @@ _URL_SUBST = re.compile(r'<[^\s:]+:([^>]+)>') CONF = keystone.conf.CONF LOG = log.getLogger(__name__) ResourceMap = collections.namedtuple( - 'resource_map', 'resource, url, alternate_urls, kwargs, json_home_data') + 'resource_map', 'resource, url, alternate_urls, kwargs, json_home_data' +) JsonHomeData = collections.namedtuple( - 'json_home_data', 'rel, status, path_vars') + 'json_home_data', 'rel, status, path_vars' +) _v3_resource_relation = json_home.build_v3_resource_relation -def construct_resource_map(resource, url, resource_kwargs, alternate_urls=None, - rel=None, status=json_home.Status.STABLE, - path_vars=None, - resource_relation_func=_v3_resource_relation): +def construct_resource_map( + resource, + url, + resource_kwargs, + alternate_urls=None, + rel=None, + status=json_home.Status.STABLE, + path_vars=None, + resource_relation_func=_v3_resource_relation, +): """Construct the ResourceMap Named Tuple. 
:param resource: The flask-RESTful resource class implementing the methods @@ -115,20 +123,30 @@ def construct_resource_map(resource, url, resource_kwargs, alternate_urls=None, """ if rel is not None: jh_data = construct_json_home_data( - rel=rel, status=status, path_vars=path_vars, - resource_relation_func=resource_relation_func) + rel=rel, + status=status, + path_vars=path_vars, + resource_relation_func=resource_relation_func, + ) else: jh_data = None if not url.startswith('/'): url = '/%s' % url return ResourceMap( - resource=resource, url=url, alternate_urls=alternate_urls, - kwargs=resource_kwargs, json_home_data=jh_data) + resource=resource, + url=url, + alternate_urls=alternate_urls, + kwargs=resource_kwargs, + json_home_data=jh_data, + ) -def construct_json_home_data(rel, status=json_home.Status.STABLE, - path_vars=None, - resource_relation_func=_v3_resource_relation): +def construct_json_home_data( + rel, + status=json_home.Status.STABLE, + path_vars=None, + resource_relation_func=_v3_resource_relation, +): rel = resource_relation_func(resource_name=rel) return JsonHomeData(rel=rel, status=status, path_vars=(path_vars or {})) @@ -140,15 +158,18 @@ def _initialize_rbac_enforcement_check(): def _assert_rbac_enforcement_called(resp): # assert is intended to be used to ensure code during development works # as expected, it is fine to be optimized out with `python -O` - msg = ('PROGRAMMING ERROR: enforcement (`keystone.common.rbac_enforcer.' - 'enforcer.RBACEnforcer.enforce_call()`) has not been called; API ' - 'is unenforced.') + msg = ( + 'PROGRAMMING ERROR: enforcement (`keystone.common.rbac_enforcer.' + 'enforcer.RBACEnforcer.enforce_call()`) has not been called; API ' + 'is unenforced.' + ) g = flask.g # NOTE(morgan): OPTIONS is a special case and is handled by flask # internally. We should never be enforcing on OPTIONS calls. 
if flask.request.method != 'OPTIONS': assert getattr( # nosec - g, enforcer._ENFORCEMENT_CHECK_ATTR, False), msg # nosec + g, enforcer._ENFORCEMENT_CHECK_ATTR, False + ), msg # nosec return resp @@ -239,9 +260,14 @@ class APIBase(object, metaclass=abc.ABCMeta): # The API Blueprint may be directly accessed via this property return self.__blueprint - def __init__(self, blueprint_url_prefix='', api_url_prefix='', - default_mediatype='application/json', decorators=None, - errors=None): + def __init__( + self, + blueprint_url_prefix='', + api_url_prefix='', + default_mediatype='application/json', + decorators=None, + errors=None, + ): self.__before_request_functions_added = False self.__after_request_functions_added = False @@ -254,23 +280,31 @@ class APIBase(object, metaclass=abc.ABCMeta): else: # NOTE(morgan): If the api_url_prefix is empty fall back on the # class-level defined `_api_url_prefix` if it is set. - self._api_url_prefix = (api_url_prefix or - getattr(self, '_api_url_prefix', '')) + self._api_url_prefix = api_url_prefix or getattr( + self, '_api_url_prefix', '' + ) if blueprint_url_prefix and not blueprint_url_prefix.startswith('/'): self._blueprint_url_prefix = self._build_bp_url_prefix( - '/%s' % blueprint_url_prefix) + '/%s' % blueprint_url_prefix + ) else: self._blueprint_url_prefix = self._build_bp_url_prefix( - blueprint_url_prefix) + blueprint_url_prefix + ) self.__blueprint = blueprints.Blueprint( - name=self._name, import_name=self._import_name, - url_prefix=self._blueprint_url_prefix) + name=self._name, + import_name=self._import_name, + url_prefix=self._blueprint_url_prefix, + ) self.__api = flask_restful.Api( - app=self.__blueprint, prefix=self._api_url_prefix, + app=self.__blueprint, + prefix=self._api_url_prefix, default_mediatype=self._default_mediatype, - decorators=decorators, errors=errors) + decorators=decorators, + errors=errors, + ) # NOTE(morgan): Make sure we're using oslo_serialization.jsonutils # instead of the default json 
serializer. Keystone has data types that @@ -306,19 +340,29 @@ class APIBase(object, metaclass=abc.ABCMeta): r_pfx = getattr(r, 'api_prefix', None) if not c_key or not m_key: - LOG.debug('Unable to add resource %(resource)s to API ' - '%(name)s, both `member_key` and `collection_key` ' - 'must be implemented. [collection_key(%(col_key)s) ' - 'member_key(%(m_key)s)]', - {'resource': r.__name__, - 'name': self._name, 'col_key': c_key, - 'm_key': m_key}) + LOG.debug( + 'Unable to add resource %(resource)s to API ' + '%(name)s, both `member_key` and `collection_key` ' + 'must be implemented. [collection_key(%(col_key)s) ' + 'member_key(%(m_key)s)]', + { + 'resource': r.__name__, + 'name': self._name, + 'col_key': c_key, + 'm_key': m_key, + }, + ) continue if r_pfx != self._api_url_prefix: - LOG.debug('Unable to add resource %(resource)s to API as the ' - 'API Prefixes do not match: %(apfx)r != %(rpfx)r', - {'resource': r.__name__, - 'rpfx': r_pfx, 'apfx': self._api_url_prefix}) + LOG.debug( + 'Unable to add resource %(resource)s to API as the ' + 'API Prefixes do not match: %(apfx)r != %(rpfx)r', + { + 'resource': r.__name__, + 'rpfx': r_pfx, + 'apfx': self._api_url_prefix, + }, + ) continue # NOTE(morgan): The Prefix is automatically added by the API, so @@ -332,43 +376,59 @@ class APIBase(object, metaclass=abc.ABCMeta): member_id_key = '%(member_key)s_id' % {'member_key': m_key} entity_path = '/%(collection)s/' % { - 'collection': c_key, 'member': member_id_key} + 'collection': c_key, + 'member': member_id_key, + } # NOTE(morgan): The json-home form of the entity path is different # from the flask-url routing form. 
Must also include the prefix - jh_e_path = _URL_SUBST.sub('{\\1}', '%(pfx)s/%(e_path)s' % { - 'pfx': self._api_url_prefix, - 'e_path': entity_path.lstrip('/')}) + jh_e_path = _URL_SUBST.sub( + '{\\1}', + '%(pfx)s/%(e_path)s' + % { + 'pfx': self._api_url_prefix, + 'e_path': entity_path.lstrip('/'), + }, + ) LOG.debug( 'Adding standard routes to API %(name)s for `%(resource)s` ' '(API Prefix: %(prefix)s) [%(collection_path)s, ' - '%(entity_path)s]', { - 'name': self._name, 'resource': r.__class__.__name__, + '%(entity_path)s]', + { + 'name': self._name, + 'resource': r.__class__.__name__, 'collection_path': collection_path, 'entity_path': entity_path, - 'prefix': self._api_url_prefix}) + 'prefix': self._api_url_prefix, + }, + ) self.api.add_resource(r, collection_path, entity_path) # Add JSON Home data resource_rel_func = getattr( - r, 'json_home_resource_rel_func', - json_home.build_v3_resource_relation) - resource_rel_status = getattr( - r, 'json_home_resource_status', None) + r, + 'json_home_resource_rel_func', + json_home.build_v3_resource_relation, + ) + resource_rel_status = getattr(r, 'json_home_resource_status', None) collection_rel_resource_name = getattr( - r, 'json_home_collection_resource_name_override', c_key) + r, 'json_home_collection_resource_name_override', c_key + ) collection_rel = resource_rel_func( - resource_name=collection_rel_resource_name) + resource_name=collection_rel_resource_name + ) # NOTE(morgan): Add the prefix explicitly for JSON Home documents # to the collection path. href_val = '%(pfx)s%(collection_path)s' % { 'pfx': self._api_url_prefix, - 'collection_path': collection_path} + 'collection_path': collection_path, + } # If additional parameters exist in the URL, add them to the # href-vars dict. 
additional_params = getattr( - r, 'json_home_additional_parameters', {}) + r, 'json_home_additional_parameters', {} + ) if additional_params: # NOTE(morgan): Special case, we have 'additional params' which @@ -381,33 +441,44 @@ class APIBase(object, metaclass=abc.ABCMeta): else: rel_data = {'href': href_val} member_rel_resource_name = getattr( - r, 'json_home_member_resource_name_override', m_key) + r, 'json_home_member_resource_name_override', m_key + ) entity_rel = resource_rel_func( - resource_name=member_rel_resource_name) + resource_name=member_rel_resource_name + ) id_str = member_id_key parameter_rel_func = getattr( - r, 'json_home_parameter_rel_func', - json_home.build_v3_parameter_relation) + r, + 'json_home_parameter_rel_func', + json_home.build_v3_parameter_relation, + ) id_param_rel = parameter_rel_func(parameter_name=id_str) - entity_rel_data = {'href-template': jh_e_path, - 'href-vars': {id_str: id_param_rel}} + entity_rel_data = { + 'href-template': jh_e_path, + 'href-vars': {id_str: id_param_rel}, + } if additional_params: entity_rel_data.setdefault('href-vars', {}).update( - additional_params) + additional_params + ) if resource_rel_status is not None: json_home.Status.update_resource_data( - rel_data, resource_rel_status) + rel_data, resource_rel_status + ) json_home.Status.update_resource_data( - entity_rel_data, resource_rel_status) + entity_rel_data, resource_rel_status + ) json_home.JsonHomeResources.append_resource( - collection_rel, rel_data) + collection_rel, rel_data + ) json_home.JsonHomeResources.append_resource( - entity_rel, entity_rel_data) + entity_rel, entity_rel_data + ) def _add_mapped_resources(self): # Add resource mappings, non-standard resource connections @@ -416,7 +487,8 @@ class APIBase(object, metaclass=abc.ABCMeta): LOG.debug( 'Adding resource routes to API %(name)s: ' '[%(url)r %(kwargs)r]', - {'name': self._name, 'url': r.url, 'kwargs': r.kwargs}) + {'name': self._name, 'url': r.url, 'kwargs': r.kwargs}, + ) urls = 
[r.url] if r.alternate_urls is not None: for element in r.alternate_urls: @@ -426,14 +498,18 @@ class APIBase(object, metaclass=abc.ABCMeta): '`%(route)s` to API %(name)s because API has a ' 'URL prefix. Only APIs without explicit prefixes ' 'can have alternate URL routes added.', - {'route': element['url'], 'name': self._name} + {'route': element['url'], 'name': self._name}, ) continue LOG.debug( 'Adding additional resource route (alternate) to API ' '%(name)s: [%(url)r %(kwargs)r]', - {'name': self._name, 'url': element['url'], - 'kwargs': r.kwargs}) + { + 'name': self._name, + 'url': element['url'], + 'kwargs': r.kwargs, + }, + ) urls.append(element['url']) if element.get('json_home'): alt_url_json_home_data.append(element['json_home']) @@ -448,7 +524,8 @@ class APIBase(object, metaclass=abc.ABCMeta): # from FLASK, do the conversion here. conv_url = '%(pfx)s/%(url)s' % { 'url': _URL_SUBST.sub('{\\1}', r.url).lstrip('/'), - 'pfx': self._api_url_prefix} + 'pfx': self._api_url_prefix, + } if r.json_home_data.path_vars: resource_data['href-template'] = conv_url @@ -456,16 +533,18 @@ class APIBase(object, metaclass=abc.ABCMeta): else: resource_data['href'] = conv_url json_home.Status.update_resource_data( - resource_data, r.json_home_data.status) + resource_data, r.json_home_data.status + ) json_home.JsonHomeResources.append_resource( - r.json_home_data.rel, - resource_data) + r.json_home_data.rel, resource_data + ) for element in alt_url_json_home_data: # Append the "new" path (resource) data with the old rel # reference. json_home.JsonHomeResources.append_resource( - element.rel, resource_data) + element.rel, resource_data + ) def _register_before_request_functions(self, functions=None): """Register functions to be executed in the `before request` phase. 
@@ -578,7 +657,8 @@ class _AttributeRaisesError(object): def __init__(self, name): self.__msg = 'PROGRAMMING ERROR: `self.{name}` is not set.'.format( - name=name) + name=name + ) def __get__(self, instance, owner): raise ValueError(self.__msg) @@ -670,11 +750,7 @@ class ResourceBase(flask_restful.Resource): container = {collection: refs} self_url = full_url(flask.request.environ['PATH_INFO']) - container['links'] = { - 'next': None, - 'self': self_url, - 'previous': None - } + container['links'] = {'next': None, 'self': self_url, 'previous': None} if list_limited: container['truncated'] = True @@ -699,13 +775,15 @@ class ResourceBase(flask_restful.Resource): # (e.g. head/get/post/...). api_prefix = api_prefix.format(**flask.request.view_args) collection_element = '/'.join( - [api_prefix, collection_name or cls.collection_key]) + [api_prefix, collection_name or cls.collection_key] + ) self_link = base_url(path='/'.join([collection_element, ref['id']])) ref.setdefault('links', {})['self'] = self_link @classmethod def filter_by_attributes(cls, refs, hints): """Filter a list of references by filter values.""" + def _attr_match(ref_attr, val_attr): """Matche attributes allowing for booleans as strings. 
@@ -741,7 +819,7 @@ class ResourceBase(flask_restful.Resource): target_value = target_value.lower() if comparator == 'contains': - return (filter_value in target_value) + return filter_value in target_value elif comparator == 'startswith': return target_value.startswith(filter_value) elif comparator == 'endswith': @@ -756,8 +834,11 @@ class ResourceBase(flask_restful.Resource): if f['comparator'] == 'equals': attr = f['name'] value = f['value'] - refs = [r for r in refs if _attr_match( - utils.flatten_dict(r).get(attr), value)] + refs = [ + r + for r in refs + if _attr_match(utils.flatten_dict(r).get(attr), value) + ] else: # It might be an inexact filter refs = [r for r in refs if _inexact_attr_match(f, r)] @@ -804,8 +885,7 @@ class ResourceBase(flask_restful.Resource): val = False if filter_name in flask.request.args: filter_value = flask.request.args.get(filter_name) - if (isinstance(filter_value, str) and - filter_value == '0'): + if isinstance(filter_value, str) and filter_value == '0': val = False else: val = True @@ -860,9 +940,12 @@ class ResourceBase(flask_restful.Resource): if comparator.startswith('i'): case_sensitive = False comparator = comparator[1:] - hints.add_filter(base_key, value, - comparator=comparator, - case_sensitive=case_sensitive) + hints.add_filter( + base_key, + value, + comparator=comparator, + case_sensitive=case_sensitive, + ) # NOTE(henry-nash): If we were to support pagination, we would pull any # pagination directives out of the query_dict here, and add them into @@ -898,7 +981,7 @@ class ResourceBase(flask_restful.Resource): if len(refs) > hints.limit['limit']: # The driver layer wasn't able to truncate it for us, so we must # do it here - return LIMITED, refs[:hints.limit['limit']] + return LIMITED, refs[: hints.limit['limit']] return NOT_LIMITED, refs @@ -936,8 +1019,9 @@ class ResourceBase(flask_restful.Resource): return else: msg = 'No domain information specified as part of list request' - tr_msg = _('No domain information 
specified as part of list ' - 'request') + tr_msg = _( + 'No domain information specified as part of list ' 'request' + ) LOG.warning(msg) raise exception.Unauthorized(tr_msg) @@ -954,7 +1038,8 @@ class ResourceBase(flask_restful.Resource): # AuthContextMiddleware. auth_context = flask.request.environ.get( - authorization.AUTH_CONTEXT_ENV, {}) + authorization.AUTH_CONTEXT_ENV, {} + ) return auth_context['token'] except KeyError: LOG.warning("Couldn't find the auth context.") @@ -965,17 +1050,21 @@ class ResourceBase(flask_restful.Resource): """Fill in domain_id if not specified in a v3 call.""" if not ref.get('domain_id'): oslo_ctx = flask.request.environ.get( - context.REQUEST_CONTEXT_ENV, None) + context.REQUEST_CONTEXT_ENV, None + ) if oslo_ctx and oslo_ctx.domain_id: # Domain Scoped Token Scenario. ref['domain_id'] = oslo_ctx.domain_id elif oslo_ctx.is_admin: # Legacy "shared" admin token Scenario raise exception.ValidationError( - _('You have tried to create a resource using the admin ' - 'token. As this token is not within a domain you must ' - 'explicitly include a domain for this resource to ' - 'belong to.')) + _( + 'You have tried to create a resource using the admin ' + 'token. As this token is not within a domain you must ' + 'explicitly include a domain for this resource to ' + 'belong to.' + ) + ) else: # TODO(henry-nash): We should issue an exception here since if # a v3 call does not explicitly specify the domain_id in the @@ -993,7 +1082,8 @@ class ResourceBase(flask_restful.Resource): 'default domain, is deprecated as of Liberty. 
There is no ' 'plan to remove this compatibility, however, future API ' 'versions may remove this, so please specify the domain ' - 'explicitly or use a domain-scoped token.') + 'explicitly or use a domain-scoped token.', + ) ref['domain_id'] = CONF.identity.default_domain_id return ref @@ -1049,8 +1139,10 @@ def unenforced_api(f): logic/varying enforcement logic (such as some of the AUTH paths) where the full enforcement will be implemented directly within the methods. """ + @functools.wraps(f) def wrapper(*args, **kwargs): set_unenforced_ok() return f(*args, **kwargs) + return wrapper diff --git a/keystone/server/flask/core.py b/keystone/server/flask/core.py index 8c0c27a077..03843a3660 100644 --- a/keystone/server/flask/core.py +++ b/keystone/server/flask/core.py @@ -34,29 +34,34 @@ from keystone.server.flask.request_processing.middleware import url_normalize # * "namespace": namespace for the entry_point # * "ep": the entry-point name # * "conf": extra config data for the entry_point (None or Dict) -_Middleware = collections.namedtuple('LoadableMiddleware', - 'namespace, ep, conf') +_Middleware = collections.namedtuple( + 'LoadableMiddleware', 'namespace, ep, conf' +) CONF = keystone.conf.CONF # NOTE(morgan): ORDER HERE IS IMPORTANT! The middleware will process the # request in this list's order. 
_APP_MIDDLEWARE = ( - _Middleware(namespace='keystone.server_middleware', - ep='cors', - conf={'oslo_config_project': 'keystone'}), - _Middleware(namespace='keystone.server_middleware', - ep='sizelimit', - conf={}), - _Middleware(namespace='keystone.server_middleware', - ep='http_proxy_to_wsgi', - conf={}), - _Middleware(namespace='keystone.server_middleware', - ep='osprofiler', - conf={}), - _Middleware(namespace='keystone.server_middleware', - ep='request_id', - conf={}), + _Middleware( + namespace='keystone.server_middleware', + ep='cors', + conf={'oslo_config_project': 'keystone'}, + ), + _Middleware( + namespace='keystone.server_middleware', ep='sizelimit', conf={} + ), + _Middleware( + namespace='keystone.server_middleware', + ep='http_proxy_to_wsgi', + conf={}, + ), + _Middleware( + namespace='keystone.server_middleware', ep='osprofiler', conf={} + ), + _Middleware( + namespace='keystone.server_middleware', ep='request_id', conf={} + ), ) # NOTE(morgan): ORDER HERE IS IMPORTANT! Each of these middlewares are @@ -77,8 +82,11 @@ def _get_config_files(env=None): dirname = env.get('OS_KEYSTONE_CONFIG_DIR', '').strip() - files = [s.strip() for s in - env.get('OS_KEYSTONE_CONFIG_FILES', '').split(';') if s.strip()] + files = [ + s.strip() + for s in env.get('OS_KEYSTONE_CONFIG_FILES', '').split(';') + if s.strip() + ] if dirname: if not files: @@ -101,9 +109,11 @@ def setup_app_middleware(app): # within the pipeline therefore cannot be magically appended/prepended if CONF.wsgi.debug_middleware: # Add in the Debug Middleware - MW = (_Middleware(namespace='keystone.server_middleware', - ep='debug', - conf={}),) + _APP_MIDDLEWARE + MW = ( + _Middleware( + namespace='keystone.server_middleware', ep='debug', conf={} + ), + ) + _APP_MIDDLEWARE # Apply internal-only Middleware (e.g. AuthContextMiddleware). These # are below all externally loaded middleware in request processing. @@ -122,7 +132,8 @@ def setup_app_middleware(app): # object pointed at "application". 
We may need to eventually move away # from the "factory" mechanism. loaded = stevedore.DriverManager( - mw.namespace, mw.ep, invoke_on_load=False) + mw.namespace, mw.ep, invoke_on_load=False + ) # NOTE(morgan): global_conf (args[0]) to the factory is always empty # and local_conf (args[1]) will be the mw.conf dict. This allows for # configuration to be passed for middleware such as oslo CORS which @@ -137,18 +148,20 @@ def setup_app_middleware(app): return app -def initialize_application(name, post_log_configured_function=lambda: None, - config_files=None): - possible_topdir = os.path.normpath(os.path.join( - os.path.abspath(__file__), - os.pardir, - os.pardir, - os.pardir, - os.pardir)) +def initialize_application( + name, post_log_configured_function=lambda: None, config_files=None +): + possible_topdir = os.path.normpath( + os.path.join( + os.path.abspath(__file__), + os.pardir, + os.pardir, + os.pardir, + os.pardir, + ) + ) - dev_conf = os.path.join(possible_topdir, - 'etc', - 'keystone.conf') + dev_conf = os.path.join(possible_topdir, 'etc', 'keystone.conf') if not config_files: config_files = None if os.path.exists(dev_conf): @@ -169,7 +182,8 @@ def initialize_application(name, post_log_configured_function=lambda: None, return app _unused, app = keystone.server.setup_backends( - startup_application_fn=loadapp) + startup_application_fn=loadapp + ) # setup OSprofiler notifier and enable the profiling if that is configured # in Keystone configuration file. 
diff --git a/keystone/server/flask/request_processing/json_body.py b/keystone/server/flask/request_processing/json_body.py index 746d88cfd4..1d3ea40961 100644 --- a/keystone/server/flask/request_processing/json_body.py +++ b/keystone/server/flask/request_processing/json_body.py @@ -30,7 +30,8 @@ def json_body_before_request(): if not flask.request.get_data(): return None elif flask.request.path and flask.request.path.startswith( - '/v3/OS-OAUTH2/'): + '/v3/OS-OAUTH2/' + ): # When the user makes a request to the OAuth2.0 token endpoint, # the user should use the "application/x-www-form-urlencoded" format # with a character encoding of UTF-8 in the HTTP request entity-body. @@ -44,15 +45,18 @@ def json_body_before_request(): # ValidationError message (as expected by our contract) # Explicitly check if the content is supposed to be json. - if (flask.request.is_json - or flask.request.headers.get('Content-Type', '') == ''): + if ( + flask.request.is_json + or flask.request.headers.get('Content-Type', '') == '' + ): json_decoded = flask.request.get_json(force=True) if not isinstance(json_decoded, dict): # In the case that the returned value was not a dict, force # a raise that will be caught the same way that a Decode error # would be handled. raise werkzeug_exceptions.BadRequest( - _('resulting JSON load was not a dict')) + _('resulting JSON load was not a dict') + ) else: # We no longer need enforcement on this API, set unenforced_ok # we already hit a validation error. This is required as the @@ -61,8 +65,9 @@ def json_body_before_request(): # as "unenforced_ok" the assertion check to ensure enforcement # was called would raise up causing a 500 error. 
ks_flask_common.set_unenforced_ok() - raise exception.ValidationError(attribute='application/json', - target='Content-Type header') + raise exception.ValidationError( + attribute='application/json', target='Content-Type header' + ) except werkzeug_exceptions.BadRequest: # We no longer need enforcement on this API, set unenforced_ok @@ -72,5 +77,6 @@ def json_body_before_request(): # as "unenforced_ok" the assertion check to ensure enforcement # was called would raise up causing a 500 error. ks_flask_common.set_unenforced_ok() - raise exception.ValidationError(attribute='valid JSON', - target='request body') + raise exception.ValidationError( + attribute='valid JSON', target='request body' + ) diff --git a/keystone/server/flask/request_processing/middleware/auth_context.py b/keystone/server/flask/request_processing/middleware/auth_context.py index 3d7c2abb2f..7fe7e9913e 100644 --- a/keystone/server/flask/request_processing/middleware/auth_context.py +++ b/keystone/server/flask/request_processing/middleware/auth_context.py @@ -50,8 +50,7 @@ CONF = keystone.conf.CONF LOG = log.getLogger(__name__) -JSON_ENCODE_CONTENT_TYPES = set(['application/json', - 'application/json-home']) +JSON_ENCODE_CONTENT_TYPES = set(['application/json', 'application/json-home']) # minimum access rules support ACCESS_RULES_MIN_VERSION = token_model.ACCESS_RULES_MIN_VERSION @@ -66,7 +65,8 @@ def best_match_language(req): if not req.accept_language: return None return req.accept_language.best_match( - oslo_i18n.get_available_languages('keystone')) + oslo_i18n.get_available_languages('keystone') + ) def base_url(context): @@ -94,18 +94,23 @@ def middleware_exceptions(method): return method(self, request) except exception.Error as e: LOG.warning(e) - return render_exception(e, request=request, - user_locale=best_match_language(request)) + return render_exception( + e, request=request, user_locale=best_match_language(request) + ) except TypeError as e: LOG.exception(e) - return 
render_exception(exception.ValidationError(e), - request=request, - user_locale=best_match_language(request)) + return render_exception( + exception.ValidationError(e), + request=request, + user_locale=best_match_language(request), + ) except Exception as e: LOG.exception(e) - return render_exception(exception.UnexpectedError(exception=e), - request=request, - user_locale=best_match_language(request)) + return render_exception( + exception.UnexpectedError(exception=e), + request=request, + user_locale=best_match_language(request), + ) return _inner @@ -120,8 +125,10 @@ def render_response(body=None, status=None, headers=None, method=None): if body is None: body = b'' - status = status or (http.client.NO_CONTENT, - http.client.responses[http.client.NO_CONTENT]) + status = status or ( + http.client.NO_CONTENT, + http.client.responses[http.client.NO_CONTENT], + ) else: content_types = [v for h, v in headers if h == 'Content-Type'] if content_types: @@ -133,8 +140,10 @@ def render_response(body=None, status=None, headers=None, method=None): body = jsonutils.dump_as_bytes(body, cls=utils.SmarterEncoder) if content_type is None: headers.append(('Content-Type', 'application/json')) - status = status or (http.client.OK, - http.client.responses[http.client.OK]) + status = status or ( + http.client.OK, + http.client.responses[http.client.OK], + ) # NOTE(davechen): `mod_wsgi` follows the standards from pep-3333 and # requires the value in response header to be binary type(str) on python2, @@ -166,10 +175,9 @@ def render_response(body=None, status=None, headers=None, method=None): headers = _convert_to_str(headers) - resp = webob.Response(body=body, - status='%d %s' % status, - headerlist=headers, - charset='utf-8') + resp = webob.Response( + body=body, status='%d %s' % status, headerlist=headers, charset='utf-8' + ) if method and method.upper() == 'HEAD': # NOTE(morganfainberg): HEAD requests should return the same status @@ -197,11 +205,13 @@ def render_exception(error, 
context=None, request=None, user_locale=None): # convert to a string. message = str(message) - body = {'error': { - 'code': error.code, - 'title': error.title, - 'message': message, - }} + body = { + 'error': { + 'code': error.code, + 'title': error.title, + 'message': message, + } + } headers = [] if isinstance(error, exception.AuthPluginException): body['error']['identity'] = error.authentication @@ -217,26 +227,29 @@ def render_exception(error, context=None, request=None, user_locale=None): url = base_url(local_context) headers.append(('WWW-Authenticate', 'Keystone uri="%s"' % url)) - return render_response(status=(error.code, error.title), - body=body, - headers=headers) + return render_response( + status=(error.code, error.title), body=body, headers=headers + ) -class AuthContextMiddleware(provider_api.ProviderAPIMixin, - auth_token.BaseAuthProtocol): +class AuthContextMiddleware( + provider_api.ProviderAPIMixin, auth_token.BaseAuthProtocol +): """Build the authentication context from the request auth token.""" kwargs_to_fetch_token = True def __init__(self, app): - super(AuthContextMiddleware, self).__init__(app, log=LOG, - service_type='identity') + super(AuthContextMiddleware, self).__init__( + app, log=LOG, service_type='identity' + ) self.token = None def fetch_token(self, token, **kwargs): try: self.token = self.token_provider_api.validate_token( - token, access_rules_support=ACCESS_RULES_MIN_VERSION) + token, access_rules_support=ACCESS_RULES_MIN_VERSION + ) return render_token.render_token_response_from_model(self.token) except exception.TokenNotFound: raise auth_token.InvalidToken(_('Could not find token')) @@ -250,10 +263,9 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, tokenless_helper = tokenless_auth.TokenlessAuthHelper(request.environ) (domain_id, project_id, trust_ref, unscoped, system) = ( - tokenless_helper.get_scope()) - user_ref = tokenless_helper.get_mapped_user( - project_id, - domain_id) + tokenless_helper.get_scope() + ) + 
user_ref = tokenless_helper.get_mapped_user(project_id, domain_id) # NOTE(gyee): if it is an ephemeral user, the # given X.509 SSL client cert does not need to map to @@ -261,10 +273,12 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, if user_ref['type'] == federation_utils.UserType.EPHEMERAL: auth_context = {} auth_context['group_ids'] = user_ref['group_ids'] - auth_context[federation_constants.IDENTITY_PROVIDER] = ( - user_ref[federation_constants.IDENTITY_PROVIDER]) - auth_context[federation_constants.PROTOCOL] = ( - user_ref[federation_constants.PROTOCOL]) + auth_context[federation_constants.IDENTITY_PROVIDER] = user_ref[ + federation_constants.IDENTITY_PROVIDER + ] + auth_context[federation_constants.PROTOCOL] = user_ref[ + federation_constants.PROTOCOL + ] if domain_id and project_id: msg = _('Scoping to both domain and project is not allowed') raise ValueError(msg) @@ -306,19 +320,27 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, issuer = request.environ.get(CONF.tokenless_auth.issuer_attribute) if not issuer: - msg = ('Cannot find client issuer in env by the ' - 'issuer attribute - %s.') + msg = ( + 'Cannot find client issuer in env by the ' + 'issuer attribute - %s.' + ) LOG.info(msg, CONF.tokenless_auth.issuer_attribute) return False if issuer in CONF.tokenless_auth.trusted_issuer: return True - msg = ('The client issuer %(client_issuer)s does not match with ' - 'the trusted issuer %(trusted_issuer)s') + msg = ( + 'The client issuer %(client_issuer)s does not match with ' + 'the trusted issuer %(trusted_issuer)s' + ) LOG.info( - msg, {'client_issuer': issuer, - 'trusted_issuer': CONF.tokenless_auth.trusted_issuer}) + msg, + { + 'client_issuer': issuer, + 'trusted_issuer': CONF.tokenless_auth.trusted_issuer, + }, + ) return False @@ -337,15 +359,18 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, "option presents a significant security risk and should " "not be set. 
This option is deprecated in favor of using " "'keystone-manage bootstrap' and will be removed in a " - "future release.") + "future release." + ) request.environ[CONTEXT_ENV] = context_env if not context_env.get('is_admin', False): resp = super(AuthContextMiddleware, self).process_request(request) if resp: return resp - if request.token_auth.has_user_token and \ - not request.user_token_valid: + if ( + request.token_auth.has_user_token + and not request.user_token_valid + ): raise exception.Unauthorized(_('Not authorized.')) if request.token_auth.user is not None: request.set_user_headers(request.token_auth.user) @@ -370,9 +395,9 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, request_context.domain_name = token.domain['name'] if token.oauth_scoped: request_context.is_delegated_auth = True - request_context.oauth_consumer_id = ( - token.access_token['consumer_id'] - ) + request_context.oauth_consumer_id = token.access_token[ + 'consumer_id' + ] request_context.oauth_access_token_id = token.access_token_id if token.trust_scoped: request_context.is_delegated_auth = True @@ -388,17 +413,18 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, # The request context stores itself in thread-local memory for logging. if authorization.AUTH_CONTEXT_ENV in request.environ: - msg = ('Auth context already exists in the request ' - 'environment; it will be used for authorization ' - 'instead of creating a new one.') + msg = ( + 'Auth context already exists in the request ' + 'environment; it will be used for authorization ' + 'instead of creating a new one.' + ) LOG.warning(msg) return - kwargs = { - 'authenticated': False, - 'overwrite': True} + kwargs = {'authenticated': False, 'overwrite': True} request_context = context.RequestContext.from_environ( - request.environ, **kwargs) + request.environ, **kwargs + ) request.environ[context.REQUEST_CONTEXT_ENV] = request_context # NOTE(gyee): token takes precedence over SSL client certificates. 
@@ -418,7 +444,8 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, self.token = PROVIDERS.token_provider_api.validate_token( request.user_token, access_rules_support=request.headers.get( - authorization.ACCESS_RULES_HEADER) + authorization.ACCESS_RULES_HEADER + ), ) self._keystone_specific_values(self.token, request_context) request_context.auth_token = request.user_token @@ -430,7 +457,7 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, 'domain_id': request_context._domain_id, 'domain_name': request_context.domain_name, 'group_ids': request_context.group_ids, - 'token': self.token + 'token': self.token, } auth_context.update(additional) @@ -440,17 +467,31 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, # the credentials for RBAC. Instead, we are using the (Oslo) # request context. So we'll need to set all the necessary # credential attributes in the request context here. - token_attributes = frozenset(( - 'user_id', 'project_id', - 'domain_id', 'user_domain_id', - 'project_domain_id', 'user_domain_name', - 'project_domain_name', 'roles', 'is_admin', - 'project_name', 'domain_name', 'system_scope', - 'is_admin_project', 'service_user_id', - 'service_user_name', 'service_project_id', - 'service_project_name', 'service_user_domain_id' - 'service_user_domain_name', 'service_project_domain_id', - 'service_project_domain_name', 'service_roles')) + token_attributes = frozenset( + ( + 'user_id', + 'project_id', + 'domain_id', + 'user_domain_id', + 'project_domain_id', + 'user_domain_name', + 'project_domain_name', + 'roles', + 'is_admin', + 'project_name', + 'domain_name', + 'system_scope', + 'is_admin_project', + 'service_user_id', + 'service_user_name', + 'service_project_id', + 'service_project_name', + 'service_user_domain_id', 'service_user_domain_name', + 'service_project_domain_id', + 'service_project_domain_name', + 'service_roles', + ) + ) for attr in token_attributes: if attr in auth_context: setattr(request_context, attr, 
auth_context[attr]) @@ -474,8 +515,10 @@ class AuthContextMiddleware(provider_api.ProviderAPIMixin, @classmethod def factory(cls, global_config, **local_config): """Used for loading in middleware (holdover from paste.deploy).""" + def _factory(app): conf = global_config.copy() conf.update(local_config) return cls(app, **local_config) + return _factory diff --git a/keystone/server/wsgi.py b/keystone/server/wsgi.py index 17072fff6a..d1bc8d43e9 100644 --- a/keystone/server/wsgi.py +++ b/keystone/server/wsgi.py @@ -21,7 +21,8 @@ from keystone.server.flask import core as flask_core # are meant for public consumption def initialize_public_application(): return flask_core.initialize_application( - name='public', config_files=flask_core._get_config_files()) + name='public', config_files=flask_core._get_config_files() + ) # Keystone does not differentiate between "admin" and public with the removal diff --git a/keystone/tests/common/auth.py b/keystone/tests/common/auth.py index c0abf5d386..c5dd62250b 100644 --- a/keystone/tests/common/auth.py +++ b/keystone/tests/common/auth.py @@ -16,10 +16,18 @@ from keystone.i18n import _ class AuthTestMixin(object): """To hold auth building helper functions.""" - def _build_auth_scope(self, system=False, project_id=None, - project_name=None, project_domain_id=None, - project_domain_name=None, domain_id=None, - domain_name=None, trust_id=None, unscoped=None): + def _build_auth_scope( + self, + system=False, + project_id=None, + project_name=None, + project_domain_id=None, + project_domain_name=None, + domain_id=None, + domain_name=None, + trust_id=None, + unscoped=None, + ): scope_data = {} if system: scope_data['system'] = {'all': True} @@ -48,13 +56,22 @@ class AuthTestMixin(object): scope_data['OS-TRUST:trust'] = {} scope_data['OS-TRUST:trust']['id'] = trust_id else: - raise ValueError(_('Programming Error: Invalid arguments supplied ' - 'to build scope.')) + raise ValueError( + _( + 'Programming Error: Invalid arguments supplied ' + 
'to build scope.' + ) + ) return scope_data - def _build_user(self, user_id=None, username=None, user_domain_id=None, - user_domain_name=None): + def _build_user( + self, + user_id=None, + username=None, + user_domain_id=None, + user_domain_name=None, + ): user = {} if user_id: user['id'] = user_id @@ -68,51 +85,84 @@ class AuthTestMixin(object): user['domain']['name'] = user_domain_name return user - def _build_auth(self, user_id=None, username=None, user_domain_id=None, - user_domain_name=None, **kwargs): + def _build_auth( + self, + user_id=None, + username=None, + user_domain_id=None, + user_domain_name=None, + **kwargs + ): # NOTE(dstanek): just to ensure sanity in the tests - self.assertEqual(1, len(kwargs), - message='_build_auth requires 1 (and only 1) ' - 'secret type and value') + self.assertEqual( + 1, + len(kwargs), + message='_build_auth requires 1 (and only 1) ' + 'secret type and value', + ) secret_type, secret_value = list(kwargs.items())[0] # NOTE(dstanek): just to ensure sanity in the tests - self.assertIn(secret_type, ('passcode', 'password'), - message="_build_auth only supports 'passcode' " - "and 'password' secret types") + self.assertIn( + secret_type, + ('passcode', 'password'), + message="_build_auth only supports 'passcode' " + "and 'password' secret types", + ) data = {} - data['user'] = self._build_user(user_id=user_id, username=username, - user_domain_id=user_domain_id, - user_domain_name=user_domain_name) + data['user'] = self._build_user( + user_id=user_id, + username=username, + user_domain_id=user_domain_id, + user_domain_name=user_domain_name, + ) data['user'][secret_type] = secret_value return data def _build_token_auth(self, token): return {'id': token} - def _build_app_cred_auth(self, secret, app_cred_id=None, - app_cred_name=None, user_id=None, username=None, - user_domain_id=None, user_domain_name=None): + def _build_app_cred_auth( + self, + secret, + app_cred_id=None, + app_cred_name=None, + user_id=None, + username=None, + 
user_domain_id=None, + user_domain_name=None, + ): data = {'secret': secret} if app_cred_id: data['id'] = app_cred_id else: data['name'] = app_cred_name - data['user'] = self._build_user(user_id=user_id, - username=username, - user_domain_id=user_domain_id, - user_domain_name=user_domain_name) + data['user'] = self._build_user( + user_id=user_id, + username=username, + user_domain_id=user_domain_id, + user_domain_name=user_domain_name, + ) return data - def build_authentication_request(self, token=None, user_id=None, - username=None, user_domain_id=None, - user_domain_name=None, password=None, - kerberos=False, passcode=None, - app_cred_id=None, app_cred_name=None, - secret=None, **kwargs): + def build_authentication_request( + self, + token=None, + user_id=None, + username=None, + user_domain_id=None, + user_domain_name=None, + password=None, + kerberos=False, + passcode=None, + app_cred_id=None, + app_cred_name=None, + secret=None, + **kwargs + ): """Build auth dictionary. It will create an auth dictionary based on all the arguments @@ -129,21 +179,33 @@ class AuthTestMixin(object): if password and (user_id or username): auth_data['identity']['methods'].append('password') auth_data['identity']['password'] = self._build_auth( - user_id, username, user_domain_id, user_domain_name, - password=password) + user_id, + username, + user_domain_id, + user_domain_name, + password=password, + ) if passcode and (user_id or username): auth_data['identity']['methods'].append('totp') auth_data['identity']['totp'] = self._build_auth( - user_id, username, user_domain_id, user_domain_name, - passcode=passcode) + user_id, + username, + user_domain_id, + user_domain_name, + passcode=passcode, + ) if (app_cred_id or app_cred_name) and secret: auth_data['identity']['methods'].append('application_credential') identity = auth_data['identity'] identity['application_credential'] = self._build_app_cred_auth( - secret, app_cred_id=app_cred_id, app_cred_name=app_cred_name, - user_id=user_id, 
username=username, + secret, + app_cred_id=app_cred_id, + app_cred_name=app_cred_name, + user_id=user_id, + username=username, user_domain_id=user_domain_id, - user_domain_name=user_domain_name) + user_domain_name=user_domain_name, + ) if kwargs: auth_data['scope'] = self._build_auth_scope(**kwargs) return {'auth': auth_data} diff --git a/keystone/tests/functional/core.py b/keystone/tests/functional/core.py index bd860ba8f6..bcf9dd552f 100644 --- a/keystone/tests/functional/core.py +++ b/keystone/tests/functional/core.py @@ -23,20 +23,22 @@ class BaseTestCase(testtools.TestCase, common_auth.AuthTestMixin): request_headers = {'content-type': 'application/json'} def setUp(self): - self.ADMIN_URL = os.environ.get('KSTEST_ADMIN_URL', - 'http://localhost:5000') - self.PUBLIC_URL = os.environ.get('KSTEST_PUBLIC_URL', - 'http://localhost:5000') + self.ADMIN_URL = os.environ.get( + 'KSTEST_ADMIN_URL', 'http://localhost:5000' + ) + self.PUBLIC_URL = os.environ.get( + 'KSTEST_PUBLIC_URL', 'http://localhost:5000' + ) self.admin = { 'name': os.environ.get('KSTEST_ADMIN_USERNAME', 'admin'), 'password': os.environ.get('KSTEST_ADMIN_PASSWORD', ''), - 'domain_id': os.environ.get('KSTEST_ADMIN_DOMAIN_ID', 'default') + 'domain_id': os.environ.get('KSTEST_ADMIN_DOMAIN_ID', 'default'), } self.user = { 'name': os.environ.get('KSTEST_USER_USERNAME', 'demo'), 'password': os.environ.get('KSTEST_USER_PASSWORD', ''), - 'domain_id': os.environ.get('KSTEST_USER_DOMAIN_ID', 'default') + 'domain_id': os.environ.get('KSTEST_USER_DOMAIN_ID', 'default'), } self.project_id = os.environ.get('KSTEST_PROJECT_ID') @@ -60,12 +62,17 @@ class BaseTestCase(testtools.TestCase, common_auth.AuthTestMixin): """ body = self.build_authentication_request( - username=user['name'], user_domain_name=user['domain_id'], - password=user['password'], project_name=self.project_name, - project_domain_id=self.project_domain_id) - return requests.post(self.PUBLIC_URL + '/v3/auth/tokens', - headers=self.request_headers, - 
json=body) + username=user['name'], + user_domain_name=user['domain_id'], + password=user['password'], + project_name=self.project_name, + project_domain_id=self.project_domain_id, + ) + return requests.post( + self.PUBLIC_URL + '/v3/auth/tokens', + headers=self.request_headers, + json=body, + ) def get_scoped_token(self, user): """Convenience method for getting scoped token. diff --git a/keystone/tests/functional/shared/test_running.py b/keystone/tests/functional/shared/test_running.py index 22a6dd2c5a..594e4379f8 100644 --- a/keystone/tests/functional/shared/test_running.py +++ b/keystone/tests/functional/shared/test_running.py @@ -17,7 +17,8 @@ from keystone.tests.functional import core as functests is_multiple_choices = testtools.matchers.Equals( - requests.status_codes.codes.multiple_choices) + requests.status_codes.codes.multiple_choices +) is_ok = testtools.matchers.Equals(requests.status_codes.codes.ok) versions = ['v3'] @@ -35,7 +36,9 @@ class TestServerRunning(functests.BaseTestCase): self.assertThat( resp.status_code, testtools.matchers.Annotate( - 'failed for version %s' % version, is_ok)) + 'failed for version %s' % version, is_ok + ), + ) def test_public_responds_with_multiple_choices(self): resp = requests.get(self.PUBLIC_URL) @@ -47,7 +50,9 @@ class TestServerRunning(functests.BaseTestCase): self.assertThat( resp.status_code, testtools.matchers.Annotate( - 'failed for version %s' % version, is_ok)) + 'failed for version %s' % version, is_ok + ), + ) def test_get_user_token(self): token = self.get_scoped_user_token() diff --git a/keystone/tests/hacking/checks.py b/keystone/tests/hacking/checks.py index 13b1429717..cbdb40c2c4 100644 --- a/keystone/tests/hacking/checks.py +++ b/keystone/tests/hacking/checks.py @@ -79,10 +79,14 @@ class CheckForMutableDefaultArgs(BaseASTChecker): CHECK_DESC = 'K001 Using mutable as a function/method default' MUTABLES = ( - ast.List, ast.ListComp, - ast.Dict, ast.DictComp, - ast.Set, ast.SetComp, - ast.Call) + ast.List, 
+ ast.ListComp, + ast.Dict, + ast.DictComp, + ast.Set, + ast.SetComp, + ast.Call, + ) def visit_FunctionDef(self, node): for arg in node.args.defaults: @@ -124,9 +128,7 @@ class CheckForTranslationIssues(BaseASTChecker): LOGGING_CHECK_DESC = 'K005 Using translated string in logging' USING_DEPRECATED_WARN = 'K009 Using the deprecated Logger.warn' LOG_MODULES = ('logging', 'oslo_log.log') - I18N_MODULES = ( - 'keystone.i18n._', - ) + I18N_MODULES = ('keystone.i18n._',) TRANS_HELPER_MAP = { 'debug': None, 'info': '_LI', @@ -181,8 +183,9 @@ class CheckForTranslationIssues(BaseASTChecker): """Return the fully qualified name or a Name or Attribute.""" if isinstance(node, ast.Name): return node.id - elif (isinstance(node, ast.Attribute) - and isinstance(node.value, (ast.Name, ast.Attribute))): + elif isinstance(node, ast.Attribute) and isinstance( + node.value, (ast.Name, ast.Attribute) + ): method_name = node.attr obj_name = self._find_name(node.value) if obj_name is None: @@ -213,18 +216,22 @@ class CheckForTranslationIssues(BaseASTChecker): """ attr_node_types = (ast.Name, ast.Attribute) - if (len(node.targets) != 1 - or not isinstance(node.targets[0], attr_node_types)): + if len(node.targets) != 1 or not isinstance( + node.targets[0], attr_node_types + ): # say no to: "x, y = ..." 
return super(CheckForTranslationIssues, self).generic_visit(node) target_name = self._find_name(node.targets[0]) - if (isinstance(node.value, ast.BinOp) and - isinstance(node.value.op, ast.Mod)): - if (isinstance(node.value.left, ast.Call) and - isinstance(node.value.left.func, ast.Name) and - node.value.left.func.id in self.i18n_names): + if isinstance(node.value, ast.BinOp) and isinstance( + node.value.op, ast.Mod + ): + if ( + isinstance(node.value.left, ast.Call) + and isinstance(node.value.left.func, ast.Name) + and node.value.left.func.id in self.i18n_names + ): # NOTE(dstanek): this is done to match cases like: # `msg = _('something %s') % x` node = ast.Assign(value=node.value.left) @@ -235,13 +242,16 @@ class CheckForTranslationIssues(BaseASTChecker): return super(CheckForTranslationIssues, self).generic_visit(node) # is this a call to an i18n function? - if (isinstance(node.value.func, ast.Name) - and node.value.func.id in self.i18n_names): + if ( + isinstance(node.value.func, ast.Name) + and node.value.func.id in self.i18n_names + ): self.assignments[target_name] = node.value.func.id return super(CheckForTranslationIssues, self).generic_visit(node) - if (not isinstance(node.value.func, ast.Attribute) - or not isinstance(node.value.func.value, attr_node_types)): + if not isinstance(node.value.func, ast.Attribute) or not isinstance( + node.value.func.value, attr_node_types + ): # function must be an attribute on an object like # logging.getLogger return super(CheckForTranslationIssues, self).generic_visit(node) @@ -249,8 +259,10 @@ class CheckForTranslationIssues(BaseASTChecker): object_name = self._find_name(node.value.func.value) func_name = node.value.func.attr - if (object_name in self.logger_module_names - and func_name == 'getLogger'): + if ( + object_name in self.logger_module_names + and func_name == 'getLogger' + ): self.logger_names.append(target_name) return super(CheckForTranslationIssues, self).generic_visit(node) @@ -266,8 +278,9 @@ class 
CheckForTranslationIssues(BaseASTChecker): obj_name = self._find_name(node.func.value) method_name = node.func.attr else: # could be Subscript, Call or many more - return (super(CheckForTranslationIssues, self) - .generic_visit(node)) + return super(CheckForTranslationIssues, self).generic_visit( + node + ) # if dealing with a logger the method can't be "warn" if obj_name in self.logger_names and method_name == 'warn': @@ -275,15 +288,19 @@ class CheckForTranslationIssues(BaseASTChecker): self.add_error(msg, message=self.USING_DEPRECATED_WARN) # must be a logger instance and one of the support logging methods - if (obj_name not in self.logger_names - or method_name not in self.TRANS_HELPER_MAP): - return (super(CheckForTranslationIssues, self) - .generic_visit(node)) + if ( + obj_name not in self.logger_names + or method_name not in self.TRANS_HELPER_MAP + ): + return super(CheckForTranslationIssues, self).generic_visit( + node + ) # the call must have arguments if not node.args: - return (super(CheckForTranslationIssues, self) - .generic_visit(node)) + return super(CheckForTranslationIssues, self).generic_visit( + node + ) self._process_log_messages(node) @@ -293,14 +310,15 @@ class CheckForTranslationIssues(BaseASTChecker): msg = node.args[0] # first arg to a logging method is the msg # if first arg is a call to a i18n name - if (isinstance(msg, ast.Call) - and isinstance(msg.func, ast.Name) - and msg.func.id in self.i18n_names): + if ( + isinstance(msg, ast.Call) + and isinstance(msg.func, ast.Name) + and msg.func.id in self.i18n_names + ): self.add_error(msg, message=self.LOGGING_CHECK_DESC) # if the first arg is a reference to a i18n call - elif (isinstance(msg, ast.Name) - and msg.id in self.assignments): + elif isinstance(msg, ast.Name) and msg.id in self.assignments: self.add_error(msg, message=self.LOGGING_CHECK_DESC) @@ -318,11 +336,14 @@ def dict_constructor_with_sequence_copy(logical_line): K008: dict([[i,i] for i in range(3)]) """ - MESSAGE = ("K008 
Must use a dict comprehension instead of a dict" - " constructor with a sequence of key-value pairs.") + MESSAGE = ( + "K008 Must use a dict comprehension instead of a dict" + " constructor with a sequence of key-value pairs." + ) - dict_constructor_with_sequence_re = ( - re.compile(r".*\bdict\((\[)?(\(|\[)(?!\{)")) + dict_constructor_with_sequence_re = re.compile( + r".*\bdict\((\[)?(\(|\[)(?!\{)" + ) if dict_constructor_with_sequence_re.match(logical_line): yield (0, MESSAGE) diff --git a/keystone/tests/protection/v3/test_access_rules.py b/keystone/tests/protection/v3/test_access_rules.py index 84c7afa5e7..64b3ef4caf 100644 --- a/keystone/tests/protection/v3/test_access_rules.py +++ b/keystone/tests/protection/v3/test_access_rules.py @@ -37,18 +37,23 @@ class _UserAccessRuleTests(object): 'user_id': self.user_id, 'project_id': self.project_id, 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) with self.test_client() as c: path = '/v3/users/%s/access_rules/%s' % ( - self.user_id, app_cred['access_rules'][0]['id']) + self.user_id, + app_cred['access_rules'][0]['id'], + ) c.get(path, headers=self.headers) def test_user_can_list_their_access_rules(self): @@ -58,18 +63,23 @@ class _UserAccessRuleTests(object): 'user_id': self.user_id, 'project_id': self.project_id, 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': uuid.uuid4().hex, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': uuid.uuid4().hex, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } 
PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) with self.test_client() as c: - r = c.get('/v3/users/%s/access_rules' % self.user_id, - headers=self.headers) + r = c.get( + '/v3/users/%s/access_rules' % self.user_id, + headers=self.headers, + ) self.assertEqual(len(r.json['access_rules']), 1) def test_user_can_delete_their_access_rules(self): @@ -80,20 +90,26 @@ class _UserAccessRuleTests(object): 'user_id': self.user_id, 'project_id': self.project_id, 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) PROVIDERS.application_credential_api.delete_application_credential( - app_cred['id']) + app_cred['id'] + ) with self.test_client() as c: path = '/v3/users/%s/access_rules/%s' % ( - self.user_id, access_rule_id) + self.user_id, + access_rule_id, + ) c.delete(path, headers=self.headers) @@ -108,8 +124,9 @@ class _ProjectUsersTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) access_rule_id = uuid.uuid4().hex @@ -119,30 +136,36 @@ class _ProjectUsersTests(object): 'user_id': user['id'], 'project_id': project['id'], 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': 
uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) with self.test_client() as c: path = '/v3/users/%s/access_rules/%s' % ( - user['id'], access_rule_id) + user['id'], + access_rule_id, + ) c.get( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_own_non_existent_access_rule_not_found(self): with self.test_client() as c: c.get( - '/v3/users/%s/access_rules/%s' % ( - self.user_id, uuid.uuid4().hex), + '/v3/users/%s/access_rules/%s' + % (self.user_id, uuid.uuid4().hex), headers=self.headers, - expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) def test_cannot_get_non_existent_access_rule_other_user_forbidden(self): @@ -150,10 +173,10 @@ class _ProjectUsersTests(object): user = PROVIDERS.identity_api.create_user(user) with self.test_client() as c: c.get( - '/v3/users/%s/access_rules/%s' % ( - user['id'], uuid.uuid4().hex), + '/v3/users/%s/access_rules/%s' + % (user['id'], uuid.uuid4().hex), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_access_rules_for_other_users(self): @@ -164,8 +187,9 @@ class _ProjectUsersTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) app_cred = { 'id': uuid.uuid4().hex, @@ -173,20 +197,26 @@ class _ProjectUsersTests(object): 'user_id': user['id'], 'project_id': project['id'], 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': uuid.uuid4().hex, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': 
[ + { + 'id': uuid.uuid4().hex, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) with self.test_client() as c: path = '/v3/users/%s/access_rules' % user['id'] - c.get(path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_delete_access_rules_for_others(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -196,8 +226,9 @@ class _ProjectUsersTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) access_rule_id = uuid.uuid4().hex app_cred = { @@ -206,23 +237,30 @@ class _ProjectUsersTests(object): 'user_id': user['id'], 'project_id': project['id'], 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) PROVIDERS.application_credential_api.delete_application_credential( - app_cred['id']) + app_cred['id'] + ) with self.test_client() as c: path = '/v3/users/%s/access_rules/%s' % ( - user['id'], access_rule_id) + user['id'], + access_rule_id, + ) c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def 
test_cannot_delete_non_existent_access_rule_other_user_forbidden(self): @@ -230,10 +268,10 @@ class _ProjectUsersTests(object): user = PROVIDERS.identity_api.create_user(user) with self.test_client() as c: c.delete( - '/v3/users/%s/access_rules/%s' % ( - user['id'], uuid.uuid4().hex), + '/v3/users/%s/access_rules/%s' + % (user['id'], uuid.uuid4().hex), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -248,8 +286,9 @@ class _SystemUserAccessRuleTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) app_cred = { @@ -258,19 +297,23 @@ class _SystemUserAccessRuleTests(object): 'user_id': user['id'], 'project_id': project['id'], 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': uuid.uuid4().hex, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': uuid.uuid4().hex, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) with self.test_client() as c: - r = c.get('/v3/users/%s/access_rules' % user['id'], - headers=self.headers) + r = c.get( + '/v3/users/%s/access_rules' % user['id'], headers=self.headers + ) self.assertEqual(1, len(r.json['access_rules'])) def test_user_cannot_get_non_existent_access_rule_not_found(self): @@ -278,16 +321,18 @@ class _SystemUserAccessRuleTests(object): user = PROVIDERS.identity_api.create_user(user) with self.test_client() as c: c.get( - '/v3/users/%s/access_rules/%s' % ( - user['id'], uuid.uuid4().hex), + '/v3/users/%s/access_rules/%s' + % (user['id'], uuid.uuid4().hex), headers=self.headers, - 
expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserAccessRuleTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserAccessRuleTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -298,16 +343,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -325,8 +369,9 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) access_rule_id = uuid.uuid4().hex @@ -336,23 +381,30 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, 'user_id': user['id'], 'project_id': project['id'], 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - 
app_cred) + app_cred + ) PROVIDERS.application_credential_api.delete_application_credential( - app_cred['id']) + app_cred['id'] + ) with self.test_client() as c: path = '/v3/users/%s/access_rules/%s' % ( - user['id'], access_rule_id) + user['id'], + access_rule_id, + ) c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_access_rule_forbidden(self): @@ -360,16 +412,18 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, user = PROVIDERS.identity_api.create_user(user) with self.test_client() as c: c.delete( - '/v3/users/%s/access_rules/%s' % ( - user['id'], uuid.uuid4().hex), + '/v3/users/%s/access_rules/%s' + % (user['id'], uuid.uuid4().hex), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserAccessRuleTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserAccessRuleTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -380,16 +434,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -407,8 +460,9 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, ) 
project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) access_rule_id = uuid.uuid4().hex @@ -418,31 +472,41 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, 'user_id': user['id'], 'project_id': project['id'], 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) PROVIDERS.application_credential_api.delete_application_credential( - app_cred['id']) + app_cred['id'] + ) with self.test_client() as c: path = '/v3/users/%s/access_rules/%s' % ( - user['id'], access_rule_id) + user['id'], + access_rule_id, + ) c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) with self.test_client() as c: path = '/v3/users/%s/access_rules/%s' % ( - user['id'], access_rule_id) + user['id'], + access_rule_id, + ) c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_access_rule_forbidden(self): @@ -450,16 +514,18 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, user = PROVIDERS.identity_api.create_user(user) with self.test_client() as c: c.delete( - '/v3/users/%s/access_rules/%s' % ( - user['id'], uuid.uuid4().hex), + '/v3/users/%s/access_rules/%s' + % (user['id'], uuid.uuid4().hex), headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserAccessRuleTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserAccessRuleTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -473,7 +539,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -491,8 +557,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) access_rule_id = uuid.uuid4().hex app_cred = { @@ -501,21 +568,27 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 'user_id': user['id'], 'project_id': project['id'], 'secret': uuid.uuid4().hex, - 'access_rules': [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + 'access_rules': [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ], } PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) PROVIDERS.application_credential_api.delete_application_credential( - app_cred['id']) + app_cred['id'] + ) with self.test_client() as c: path = '/v3/users/%s/access_rules/%s' % ( - user['id'], access_rule_id) + user['id'], + access_rule_id, + ) c.delete(path, headers=self.headers) def test_user_cannot_delete_non_existent_access_rule_not_found(self): @@ 
-523,17 +596,19 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, user = PROVIDERS.identity_api.create_user(user) with self.test_client() as c: c.delete( - '/v3/users/%s/access_rules/%s' % ( - user['id'], uuid.uuid4().hex), + '/v3/users/%s/access_rules/%s' + % (user['id'], uuid.uuid4().hex), headers=self.headers, - expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) -class ProjectReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserAccessRuleTests, - _ProjectUsersTests): +class ProjectReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserAccessRuleTests, + _ProjectUsersTests, +): def setUp(self): super(ProjectReaderTests, self).setUp() @@ -544,9 +619,7 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, project_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - project_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(project_reader)['id'] project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id ) @@ -554,14 +627,15 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=project_reader['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -572,10 +646,12 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserAccessRuleTests, - _ProjectUsersTests): +class 
ProjectMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserAccessRuleTests, + _ProjectUsersTests, +): def setUp(self): super(ProjectMemberTests, self).setUp() @@ -586,9 +662,7 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, project_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - project_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(project_member)['id'] project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id ) @@ -596,14 +670,15 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=project_member['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -614,10 +689,12 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserAccessRuleTests, - _ProjectUsersTests): +class ProjectAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserAccessRuleTests, + _ProjectUsersTests, +): def setUp(self): super(ProjectAdminTests, self).setUp() @@ -639,7 +716,7 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_application_credential.py 
b/keystone/tests/protection/v3/test_application_credential.py index 5f7c2a202b..d45a3f244c 100644 --- a/keystone/tests/protection/v3/test_application_credential.py +++ b/keystone/tests/protection/v3/test_application_credential.py @@ -32,8 +32,14 @@ PROVIDERS = provider_api.ProviderAPIs class _TestAppCredBase(base_classes.TestCaseWithBootstrap): """Base class for application credential tests.""" - def _new_app_cred_data(self, user_id=None, project_id=None, name=None, - expires=None, system=None): + def _new_app_cred_data( + self, + user_id=None, + project_id=None, + name=None, + expires=None, + system=None, + ): if not user_id: user_id = self.app_cred_user_id if not name: @@ -56,7 +62,7 @@ class _TestAppCredBase(base_classes.TestCaseWithBootstrap): {'id': self.bootstrapper.member_role_id}, ], 'secret': uuid.uuid4().hex, - 'unrestricted': False + 'unrestricted': False, } return app_cred_data @@ -67,27 +73,27 @@ class _TestAppCredBase(base_classes.TestCaseWithBootstrap): new_user_ref = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - app_cred_user_ref = PROVIDERS.identity_api.create_user( - new_user_ref - ) + app_cred_user_ref = PROVIDERS.identity_api.create_user(new_user_ref) self.app_cred_user_id = app_cred_user_ref['id'] self.app_cred_user_password = new_user_ref['password'] app_cred_project_ref = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) self.app_cred_project_id = app_cred_project_ref['id'] PROVIDERS.assignment_api.create_grant( self.bootstrapper.member_role_id, user_id=self.app_cred_user_id, - project_id=self.app_cred_project_id + project_id=self.app_cred_project_id, ) def _create_application_credential(self): app_cred = self._new_app_cred_data() - return \ + return ( PROVIDERS.application_credential_api.create_application_credential( - app_cred) + app_cred + ) + ) def _override_policy(self): # 
TODO(gyee): Remove this once the deprecated policies in @@ -101,13 +107,17 @@ class _TestAppCredBase(base_classes.TestCaseWithBootstrap): with open(self.policy_file_name, 'w') as f: overridden_policies = { 'identity:get_application_credential': ( - base_policy.RULE_SYSTEM_READER_OR_OWNER), + base_policy.RULE_SYSTEM_READER_OR_OWNER + ), 'identity:list_application_credentials': ( - base_policy.RULE_SYSTEM_READER_OR_OWNER), + base_policy.RULE_SYSTEM_READER_OR_OWNER + ), 'identity:create_application_credential': ( - base_policy.RULE_OWNER), + base_policy.RULE_OWNER + ), 'identity:delete_application_credential': ( - base_policy.RULE_SYSTEM_ADMIN_OR_OWNER), + base_policy.RULE_SYSTEM_ADMIN_OR_OWNER + ), } f.write(jsonutils.dumps(overridden_policies)) @@ -125,62 +135,68 @@ class _DomainAndProjectUserTests(object): self._create_application_credential() with self.test_client() as c: - c.get('/v3/users/%s/application_credentials' % ( - self.app_cred_user_id), - expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + c.get( + '/v3/users/%s/application_credentials' + % (self.app_cred_user_id), + expected_status_code=http.client.FORBIDDEN, + headers=self.headers, + ) def test_user_cannot_get_application_credential(self): app_cred = self._create_application_credential() with self.test_client() as c: - c.get('/v3/users/%s/application_credentials/%s' % ( - self.app_cred_user_id, - app_cred['id']), - expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + c.get( + '/v3/users/%s/application_credentials/%s' + % (self.app_cred_user_id, app_cred['id']), + expected_status_code=http.client.FORBIDDEN, + headers=self.headers, + ) def test_user_cannot_lookup_application_credential(self): app_cred = self._create_application_credential() with self.test_client() as c: - c.get('/v3/users/%s/application_credentials?name=%s' % ( - self.app_cred_user_id, - app_cred['name']), - expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + c.get( + 
'/v3/users/%s/application_credentials?name=%s' + % (self.app_cred_user_id, app_cred['name']), + expected_status_code=http.client.FORBIDDEN, + headers=self.headers, + ) def test_user_cannot_delete_application_credential(self): app_cred = self._create_application_credential() with self.test_client() as c: c.delete( - '/v3/users/%s/application_credentials/%s' % ( - self.app_cred_user_id, - app_cred['id']), + '/v3/users/%s/application_credentials/%s' + % (self.app_cred_user_id, app_cred['id']), expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + headers=self.headers, + ) def test_user_cannot_lookup_non_existent_application_credential(self): with self.test_client() as c: - c.get('/v3/users/%s/application_credentials?name=%s' % ( - self.app_cred_user_id, - uuid.uuid4().hex), - expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + c.get( + '/v3/users/%s/application_credentials?name=%s' + % (self.app_cred_user_id, uuid.uuid4().hex), + expected_status_code=http.client.FORBIDDEN, + headers=self.headers, + ) def test_user_cannot_create_app_credential_for_another_user(self): # create another user another_user = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - another_user_id = PROVIDERS.identity_api.create_user( - another_user - )['id'] + another_user_id = PROVIDERS.identity_api.create_user(another_user)[ + 'id' + ] app_cred_body = { 'application_credential': unit.new_application_credential_ref( - roles=[{'id': self.bootstrapper.member_role_id}]) + roles=[{'id': self.bootstrapper.member_role_id}] + ) } with self.test_client() as c: @@ -188,7 +204,8 @@ class _DomainAndProjectUserTests(object): '/v3/users/%s/application_credentials' % another_user_id, json=app_cred_body, expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + headers=self.headers, + ) class _SystemUserAndOwnerTests(object): @@ -201,9 +218,10 @@ class _SystemUserAndOwnerTests(object): with self.test_client() as c: r = c.get( - 
'/v3/users/%s/application_credentials' % ( - self.app_cred_user_id), - headers=self.headers) + '/v3/users/%s/application_credentials' + % (self.app_cred_user_id), + headers=self.headers, + ) self.assertEqual(2, len(r.json['application_credentials'])) def test_user_can_get_application_credential(self): @@ -211,10 +229,10 @@ class _SystemUserAndOwnerTests(object): with self.test_client() as c: r = c.get( - '/v3/users/%s/application_credentials/%s' % ( - self.app_cred_user_id, - app_cred['id']), - headers=self.headers) + '/v3/users/%s/application_credentials/%s' + % (self.app_cred_user_id, app_cred['id']), + headers=self.headers, + ) actual_app_cred = r.json['application_credential'] self.assertEqual(app_cred['id'], actual_app_cred['id']) @@ -223,39 +241,40 @@ class _SystemUserAndOwnerTests(object): with self.test_client() as c: r = c.get( - '/v3/users/%s/application_credentials?name=%s' % ( - self.app_cred_user_id, - app_cred['name']), - headers=self.headers) + '/v3/users/%s/application_credentials?name=%s' + % (self.app_cred_user_id, app_cred['name']), + headers=self.headers, + ) self.assertEqual(1, len(r.json['application_credentials'])) actual_app_cred = r.json['application_credentials'][0] self.assertEqual(app_cred['id'], actual_app_cred['id']) def _test_delete_application_credential( - self, - expected_status_code=http.client.NO_CONTENT): + self, expected_status_code=http.client.NO_CONTENT + ): app_cred = self._create_application_credential() with self.test_client() as c: c.delete( - '/v3/users/%s/application_credentials/%s' % ( - self.app_cred_user_id, - app_cred['id']), + '/v3/users/%s/application_credentials/%s' + % (self.app_cred_user_id, app_cred['id']), expected_status_code=expected_status_code, - headers=self.headers) + headers=self.headers, + ) def test_user_cannot_create_app_credential_for_another_user(self): # create another user another_user = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - another_user_id = 
PROVIDERS.identity_api.create_user( - another_user - )['id'] + another_user_id = PROVIDERS.identity_api.create_user(another_user)[ + 'id' + ] app_cred_body = { 'application_credential': unit.new_application_credential_ref( - roles=[{'id': self.bootstrapper.member_role_id}]) + roles=[{'id': self.bootstrapper.member_role_id}] + ) } with self.test_client() as c: @@ -263,12 +282,13 @@ class _SystemUserAndOwnerTests(object): '/v3/users/%s/application_credentials' % another_user_id, json=app_cred_body, expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + headers=self.headers, + ) -class SystemReaderTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _SystemUserAndOwnerTests): +class SystemReaderTests( + _TestAppCredBase, common_auth.AuthTestMixin, _SystemUserAndOwnerTests +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -279,16 +299,15 @@ class SystemReaderTests(_TestAppCredBase, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -300,12 +319,13 @@ class SystemReaderTests(_TestAppCredBase, def test_system_reader_cannot_delete_application_credential_for_user(self): self._test_delete_application_credential( - expected_status_code=http.client.FORBIDDEN) + expected_status_code=http.client.FORBIDDEN + ) -class SystemMemberTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _SystemUserAndOwnerTests): +class SystemMemberTests( + _TestAppCredBase, common_auth.AuthTestMixin, _SystemUserAndOwnerTests +): 
def setUp(self): super(SystemMemberTests, self).setUp() @@ -316,16 +336,15 @@ class SystemMemberTests(_TestAppCredBase, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -337,12 +356,13 @@ class SystemMemberTests(_TestAppCredBase, def test_system_reader_cannot_delete_application_credential_for_user(self): self._test_delete_application_credential( - expected_status_code=http.client.FORBIDDEN) + expected_status_code=http.client.FORBIDDEN + ) -class SystemAdminTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _SystemUserAndOwnerTests): +class SystemAdminTests( + _TestAppCredBase, common_auth.AuthTestMixin, _SystemUserAndOwnerTests +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -354,7 +374,7 @@ class SystemAdminTests(_TestAppCredBase, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -368,9 +388,9 @@ class SystemAdminTests(_TestAppCredBase, self._test_delete_application_credential() -class OwnerTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _SystemUserAndOwnerTests): +class OwnerTests( + _TestAppCredBase, common_auth.AuthTestMixin, _SystemUserAndOwnerTests +): def setUp(self): super(OwnerTests, self).setUp() @@ -393,7 +413,7 @@ class OwnerTests(_TestAppCredBase, auth = self.build_authentication_request( user_id=self.user_id, 
password=self.app_cred_user_password, - project_id=self.app_cred_project_id + project_id=self.app_cred_project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -413,7 +433,8 @@ class OwnerTests(_TestAppCredBase, '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers=self.headers) + headers=self.headers, + ) def test_owner_can_delete_application_credential(self): self._test_delete_application_credential() @@ -423,13 +444,12 @@ class OwnerTests(_TestAppCredBase, another_user = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - another_user_id = PROVIDERS.identity_api.create_user( - another_user - )['id'] + another_user_id = PROVIDERS.identity_api.create_user(another_user)[ + 'id' + ] auth = self.build_authentication_request( - user_id=another_user_id, - password=another_user['password'] + user_id=another_user_id, password=another_user['password'] ) # authenticate for a token as a completely different user with @@ -445,24 +465,23 @@ class OwnerTests(_TestAppCredBase, # attempt to lookup the application credential as another user with self.test_client() as c: c.get( - '/v3/users/%s/application_credentials/%s' % ( - another_user_id, - app_cred['id']), + '/v3/users/%s/application_credentials/%s' + % (another_user_id, app_cred['id']), expected_status_code=http.client.FORBIDDEN, - headers={'X-Auth-Token': another_user_token}) + headers={'X-Auth-Token': another_user_token}, + ) def test_user_cannot_delete_application_credential_for_another_user(self): # create another user another_user = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - another_user_id = PROVIDERS.identity_api.create_user( - another_user - )['id'] + another_user_id = PROVIDERS.identity_api.create_user(another_user)[ + 'id' + ] auth = self.build_authentication_request( - user_id=another_user_id, - password=another_user['password'] + user_id=another_user_id, 
password=another_user['password'] ) # authenticate for a token as a completely different user with @@ -478,16 +497,16 @@ class OwnerTests(_TestAppCredBase, # attempt to delete the application credential as another user with self.test_client() as c: c.delete( - '/v3/users/%s/application_credentials/%s' % ( - another_user_id, - app_cred['id']), + '/v3/users/%s/application_credentials/%s' + % (another_user_id, app_cred['id']), expected_status_code=http.client.FORBIDDEN, - headers={'X-Auth-Token': another_user_token}) + headers={'X-Auth-Token': another_user_token}, + ) -class DomainAdminTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _DomainAndProjectUserTests): +class DomainAdminTests( + _TestAppCredBase, common_auth.AuthTestMixin, _DomainAndProjectUserTests +): def setUp(self): super(DomainAdminTests, self).setUp() @@ -505,16 +524,19 @@ class DomainAdminTests(_TestAppCredBase, self.config_fixture.config(group='oslo_policy', enforce_scope=True) domain_admin = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=CONF.identity.default_domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=CONF.identity.default_domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], - domain_id=CONF.identity.default_domain_id + user_id=self.user_id, + password=domain_admin['password'], + domain_id=CONF.identity.default_domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -525,9 +547,9 @@ class DomainAdminTests(_TestAppCredBase, self.headers = {'X-Auth-Token': self.token_id} -class DomainReaderTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _DomainAndProjectUserTests): +class DomainReaderTests( + _TestAppCredBase, 
common_auth.AuthTestMixin, _DomainAndProjectUserTests +): def setUp(self): super(DomainReaderTests, self).setUp() @@ -545,16 +567,19 @@ class DomainReaderTests(_TestAppCredBase, self.config_fixture.config(group='oslo_policy', enforce_scope=True) domain_admin = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - domain_id=CONF.identity.default_domain_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + domain_id=CONF.identity.default_domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], - domain_id=CONF.identity.default_domain_id + user_id=self.user_id, + password=domain_admin['password'], + domain_id=CONF.identity.default_domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -565,9 +590,9 @@ class DomainReaderTests(_TestAppCredBase, self.headers = {'X-Auth-Token': self.token_id} -class DomainMemberTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _DomainAndProjectUserTests): +class DomainMemberTests( + _TestAppCredBase, common_auth.AuthTestMixin, _DomainAndProjectUserTests +): def setUp(self): super(DomainMemberTests, self).setUp() @@ -585,16 +610,19 @@ class DomainMemberTests(_TestAppCredBase, self.config_fixture.config(group='oslo_policy', enforce_scope=True) domain_admin = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - domain_id=CONF.identity.default_domain_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + domain_id=CONF.identity.default_domain_id, ) auth = 
self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], - domain_id=CONF.identity.default_domain_id + user_id=self.user_id, + password=domain_admin['password'], + domain_id=CONF.identity.default_domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -605,9 +633,9 @@ class DomainMemberTests(_TestAppCredBase, self.headers = {'X-Auth-Token': self.token_id} -class ProjectAdminTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _DomainAndProjectUserTests): +class ProjectAdminTests( + _TestAppCredBase, common_auth.AuthTestMixin, _DomainAndProjectUserTests +): def setUp(self): super(ProjectAdminTests, self).setUp() @@ -625,18 +653,20 @@ class ProjectAdminTests(_TestAppCredBase, self.config_fixture.config(group='oslo_policy', enforce_scope=True) project_admin = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) self.user_id = PROVIDERS.identity_api.create_user(project_admin)['id'] # even project admin of project where the app credential # is intended for cannot perform app credential operations PROVIDERS.assignment_api.create_grant( self.bootstrapper.admin_role_id, user_id=self.user_id, - project_id=self.app_cred_project_id + project_id=self.app_cred_project_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=project_admin['password'], - project_id=self.app_cred_project_id + user_id=self.user_id, + password=project_admin['password'], + project_id=self.app_cred_project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -647,9 +677,9 @@ class ProjectAdminTests(_TestAppCredBase, self.headers = {'X-Auth-Token': self.token_id} -class ProjectReaderTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _DomainAndProjectUserTests): +class ProjectReaderTests( + _TestAppCredBase, common_auth.AuthTestMixin, _DomainAndProjectUserTests +): def setUp(self): super(ProjectReaderTests, self).setUp() @@ 
-667,18 +697,20 @@ class ProjectReaderTests(_TestAppCredBase, self.config_fixture.config(group='oslo_policy', enforce_scope=True) project_admin = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) self.user_id = PROVIDERS.identity_api.create_user(project_admin)['id'] # even project admin of project where the app credential # is intended for cannot perform app credential operations PROVIDERS.assignment_api.create_grant( self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=self.app_cred_project_id + project_id=self.app_cred_project_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=project_admin['password'], - project_id=self.app_cred_project_id + user_id=self.user_id, + password=project_admin['password'], + project_id=self.app_cred_project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -689,9 +721,9 @@ class ProjectReaderTests(_TestAppCredBase, self.headers = {'X-Auth-Token': self.token_id} -class ProjectMemberTests(_TestAppCredBase, - common_auth.AuthTestMixin, - _DomainAndProjectUserTests): +class ProjectMemberTests( + _TestAppCredBase, common_auth.AuthTestMixin, _DomainAndProjectUserTests +): def setUp(self): super(ProjectMemberTests, self).setUp() @@ -709,18 +741,20 @@ class ProjectMemberTests(_TestAppCredBase, self.config_fixture.config(group='oslo_policy', enforce_scope=True) project_admin = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) self.user_id = PROVIDERS.identity_api.create_user(project_admin)['id'] # even project admin of project where the app credential # is intended for cannot perform app credential operations PROVIDERS.assignment_api.create_grant( self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.app_cred_project_id + project_id=self.app_cred_project_id, ) auth = self.build_authentication_request( - user_id=self.user_id, 
password=project_admin['password'], - project_id=self.app_cred_project_id + user_id=self.user_id, + password=project_admin['password'], + project_id=self.app_cred_project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_assignment.py b/keystone/tests/protection/v3/test_assignment.py index 399cf6325e..9da31ddb87 100644 --- a/keystone/tests/protection/v3/test_assignment.py +++ b/keystone/tests/protection/v3/test_assignment.py @@ -54,7 +54,7 @@ class _AssignmentTestUtilities(object): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) # create a user+project role assignment. @@ -126,48 +126,64 @@ class _SystemUserTests(object): assignments = self._setup_test_role_assignments() # this assignment is created by keystone-manage bootstrap - self.expected.append({ - 'user_id': self.bootstrapper.admin_user_id, - 'project_id': self.bootstrapper.project_id, - 'role_id': self.bootstrapper.admin_role_id - }) + self.expected.append( + { + 'user_id': self.bootstrapper.admin_user_id, + 'project_id': self.bootstrapper.project_id, + 'role_id': self.bootstrapper.admin_role_id, + } + ) # this assignment is created by keystone-manage bootstrap - self.expected.append({ - 'user_id': self.bootstrapper.admin_user_id, - 'system': 'all', - 'role_id': self.bootstrapper.admin_role_id - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'project_id': 
assignments['project_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) + self.expected.append( + { + 'user_id': self.bootstrapper.admin_user_id, + 'system': 'all', + 'role_id': self.bootstrapper.admin_role_id, + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'project_id': assignments['project_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'domain_id': assignments['domain_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'project_id': assignments['project_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'domain_id': assignments['domain_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) with self.test_client() as c: r = c.get('/v3/role_assignments', headers=self.headers) @@ -182,48 +198,64 @@ class _SystemUserTests(object): assignments = self._setup_test_role_assignments() # this assignment is created by keystone-manage bootstrap - self.expected.append({ - 'user_id': self.bootstrapper.admin_user_id, - 'project_id': self.bootstrapper.project_id, - 'role_id': self.bootstrapper.admin_role_id - }) + self.expected.append( + { + 'user_id': self.bootstrapper.admin_user_id, + 'project_id': self.bootstrapper.project_id, + 'role_id': self.bootstrapper.admin_role_id, + } + ) # this assignment is created by keystone-manage 
bootstrap - self.expected.append({ - 'user_id': self.bootstrapper.admin_user_id, - 'system': 'all', - 'role_id': self.bootstrapper.admin_role_id - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) + self.expected.append( + { + 'user_id': self.bootstrapper.admin_user_id, + 'system': 'all', + 'role_id': self.bootstrapper.admin_role_id, + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'project_id': assignments['project_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'domain_id': assignments['domain_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'project_id': assignments['project_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'domain_id': assignments['domain_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) 
with self.test_client() as c: r = c.get( @@ -242,20 +274,20 @@ class _SystemUserTests(object): { 'user_id': assignments['user_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] - } + 'role_id': assignments['role_id'], + }, ] project_id = assignments['project_id'] with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.project.id=%s' % project_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -268,20 +300,20 @@ class _SystemUserTests(object): { 'user_id': assignments['user_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] - } + 'role_id': assignments['role_id'], + }, ] domain_id = assignments['domain_id'] with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.domain.id=%s' % domain_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -292,26 +324,31 @@ class _SystemUserTests(object): assignments = self._setup_test_role_assignments() # this assignment is created by keystone-manage bootstrap - self.expected.append({ - 'user_id': self.bootstrapper.admin_user_id, - 'system': 'all', - 'role_id': self.bootstrapper.admin_role_id - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) + self.expected.append( + { + 'user_id': 
self.bootstrapper.admin_user_id, + 'system': 'all', + 'role_id': self.bootstrapper.admin_role_id, + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) with self.test_client() as c: r = c.get( - '/v3/role_assignments?scope.system=all', - headers=self.headers + '/v3/role_assignments?scope.system=all', headers=self.headers ) self.assertEqual( len(self.expected), len(r.json['role_assignments']) @@ -327,25 +364,25 @@ class _SystemUserTests(object): { 'user_id': assignments['user_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'user_id': assignments['user_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'user_id': assignments['user_id'], 'system': 'all', - 'role_id': assignments['role_id'] - } + 'role_id': assignments['role_id'], + }, ] user_id = assignments['user_id'] with self.test_client() as c: r = c.get( '/v3/role_assignments?user.id=%s' % user_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -358,25 +395,25 @@ class _SystemUserTests(object): { 'group_id': assignments['group_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'system': 'all', - 'role_id': assignments['role_id'] - } + 'role_id': assignments['role_id'], + }, ] group_id = assignments['group_id'] with self.test_client() as c: r = c.get( 
'/v3/role_assignments?group.id=%s' % group_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -385,45 +422,60 @@ class _SystemUserTests(object): def test_user_can_filter_role_assignments_by_role(self): assignments = self._setup_test_role_assignments() - self.expected = [ra for ra in self.expected - if ra['role_id'] == assignments['role_id']] - self.expected.append({ - 'user_id': assignments['user_id'], - 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'user_id': assignments['user_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) + self.expected = [ + ra + for ra in self.expected + if ra['role_id'] == assignments['role_id'] + ] + self.expected.append( + { + 'user_id': assignments['user_id'], + 'project_id': assignments['project_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'domain_id': assignments['domain_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'user_id': assignments['user_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'project_id': assignments['project_id'], + 'role_id': assignments['role_id'], + } + ) + 
self.expected.append( + { + 'group_id': assignments['group_id'], + 'domain_id': assignments['domain_id'], + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) role_id = assignments['role_id'] with self.test_client() as c: r = c.get( '/v3/role_assignments?role.id=%s&include_names=True' % role_id, - headers=self.headers + headers=self.headers, ) self.assertEqual( len(self.expected), len(r.json['role_assignments']) @@ -438,12 +490,12 @@ class _SystemUserTests(object): { 'user_id': assignments['user_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, ] @@ -451,7 +503,7 @@ class _SystemUserTests(object): qs = (assignments['project_id'], assignments['role_id']) r = c.get( '/v3/role_assignments?scope.project.id=%s&role.id=%s' % qs, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -464,12 +516,12 @@ class _SystemUserTests(object): { 'user_id': assignments['user_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, ] qs = (assignments['domain_id'], assignments['role_id']) @@ -477,7 +529,7 @@ class _SystemUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.domain.id=%s&role.id=%s' % qs, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = 
self._extract_role_assignments_from_response_body(r) @@ -486,24 +538,31 @@ class _SystemUserTests(object): def test_user_can_filter_role_assignments_by_system_and_role(self): assignments = self._setup_test_role_assignments() - self.expected = [ra for ra in self.expected - if ra['role_id'] == assignments['role_id']] - self.expected.append({ - 'user_id': assignments['user_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) - self.expected.append({ - 'group_id': assignments['group_id'], - 'system': 'all', - 'role_id': assignments['role_id'] - }) + self.expected = [ + ra + for ra in self.expected + if ra['role_id'] == assignments['role_id'] + ] + self.expected.append( + { + 'user_id': assignments['user_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': assignments['group_id'], + 'system': 'all', + 'role_id': assignments['role_id'], + } + ) role_id = assignments['role_id'] with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.system=all&role.id=%s' % role_id, - headers=self.headers + headers=self.headers, ) self.assertEqual( len(self.expected), len(r.json['role_assignments']) @@ -518,25 +577,25 @@ class _SystemUserTests(object): { 'user_id': assignments['user_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'user_id': assignments['user_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'user_id': assignments['user_id'], 'system': 'all', - 'role_id': assignments['role_id'] - } + 'role_id': assignments['role_id'], + }, ] qs = (assignments['user_id'], assignments['role_id']) with self.test_client() as c: r = c.get( '/v3/role_assignments?user.id=%s&role.id=%s' % qs, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) 
@@ -549,25 +608,25 @@ class _SystemUserTests(object): { 'group_id': assignments['group_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'system': 'all', - 'role_id': assignments['role_id'] - } + 'role_id': assignments['role_id'], + }, ] with self.test_client() as c: qs = (assignments['group_id'], assignments['role_id']) r = c.get( '/v3/role_assignments?group.id=%s&role.id=%s' % qs, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -580,7 +639,7 @@ class _SystemUserTests(object): { 'user_id': assignments['user_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], } ] qs = (assignments['project_id'], assignments['user_id']) @@ -588,7 +647,7 @@ class _SystemUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.project.id=%s&user.id=%s' % qs, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -601,7 +660,7 @@ class _SystemUserTests(object): { 'group_id': assignments['group_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], } ] qs = (assignments['project_id'], assignments['group_id']) @@ -609,7 +668,7 @@ class _SystemUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.project.id=%s&group.id=%s' % qs, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = 
self._extract_role_assignments_from_response_body(r) @@ -622,7 +681,7 @@ class _SystemUserTests(object): { 'user_id': assignments['user_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], } ] qs = (assignments['domain_id'], assignments['user_id']) @@ -630,7 +689,7 @@ class _SystemUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.domain.id=%s&user.id=%s' % qs, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -643,7 +702,7 @@ class _SystemUserTests(object): { 'group_id': assignments['group_id'], 'domain_id': assignments['domain_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], } ] qs = (assignments['domain_id'], assignments['group_id']) @@ -651,7 +710,7 @@ class _SystemUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.domain.id=%s&group.id=%s' % qs, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -665,36 +724,40 @@ class _SystemUserTests(object): ) project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id, - parent_id=assignments['project_id']) + unit.new_project_ref( + domain_id=CONF.identity.default_domain_id, + parent_id=assignments['project_id'], + ), ) PROVIDERS.assignment_api.create_grant( assignments['role_id'], user_id=user['id'], - project_id=project['id'] + project_id=project['id'], ) expected = [ { 'user_id': assignments['user_id'], 'project_id': assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': assignments['group_id'], 'project_id': assignments['project_id'], - 'role_id': 
assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'user_id': user['id'], 'project_id': project['id'], - 'role_id': assignments['role_id'] - } + 'role_id': assignments['role_id'], + }, ] with self.test_client() as c: r = c.get( - ('/v3/role_assignments?scope.project.id=%s&include_subtree' % - assignments['project_id']), - headers=self.headers + ( + '/v3/role_assignments?scope.project.id=%s&include_subtree' + % assignments['project_id'] + ), + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -720,8 +783,7 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) # create a user+project role assignment. @@ -755,26 +817,34 @@ class _DomainUserTests(object): self._setup_test_role_assignments() domain_assignments = self._setup_test_role_assignments_for_domain() - self.expected.append({ - 'user_id': domain_assignments['user_id'], - 'domain_id': self.domain_id, - 'role_id': domain_assignments['role_id'] - }) - self.expected.append({ - 'user_id': domain_assignments['user_id'], - 'project_id': domain_assignments['project_id'], - 'role_id': domain_assignments['role_id'] - }) - self.expected.append({ - 'group_id': domain_assignments['group_id'], - 'domain_id': self.domain_id, - 'role_id': domain_assignments['role_id'] - }) - self.expected.append({ - 'group_id': domain_assignments['group_id'], - 'project_id': domain_assignments['project_id'], - 'role_id': domain_assignments['role_id'] - }) + self.expected.append( + { + 'user_id': domain_assignments['user_id'], + 'domain_id': self.domain_id, + 'role_id': domain_assignments['role_id'], + } + ) + self.expected.append( + { + 'user_id': domain_assignments['user_id'], + 'project_id': domain_assignments['project_id'], + 'role_id': 
domain_assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': domain_assignments['group_id'], + 'domain_id': self.domain_id, + 'role_id': domain_assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': domain_assignments['group_id'], + 'project_id': domain_assignments['project_id'], + 'role_id': domain_assignments['role_id'], + } + ) with self.test_client() as c: r = c.get('/v3/role_assignments', headers=self.headers) @@ -793,13 +863,13 @@ class _DomainUserTests(object): { 'user_id': domain_assignments['user_id'], 'project_id': domain_assignments['project_id'], - 'role_id': domain_assignments['role_id'] + 'role_id': domain_assignments['role_id'], }, { 'group_id': domain_assignments['group_id'], 'project_id': domain_assignments['project_id'], - 'role_id': domain_assignments['role_id'] - } + 'role_id': domain_assignments['role_id'], + }, ] project_id = domain_assignments['project_id'] @@ -807,7 +877,7 @@ class _DomainUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.project.id=%s' % project_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -821,21 +891,25 @@ class _DomainUserTests(object): self._setup_test_role_assignments() domain_assignments = self._setup_test_role_assignments_for_domain() - self.expected.append({ - 'user_id': domain_assignments['user_id'], - 'domain_id': self.domain_id, - 'role_id': domain_assignments['role_id'] - }) - self.expected.append({ - 'group_id': domain_assignments['group_id'], - 'domain_id': self.domain_id, - 'role_id': domain_assignments['role_id'] - }) + self.expected.append( + { + 'user_id': domain_assignments['user_id'], + 'domain_id': self.domain_id, + 'role_id': domain_assignments['role_id'], + } + ) + self.expected.append( + { + 'group_id': domain_assignments['group_id'], + 'domain_id': self.domain_id, + 'role_id': 
domain_assignments['role_id'], + } + ) with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.domain.id=%s' % self.domain_id, - headers=self.headers + headers=self.headers, ) self.assertEqual( len(self.expected), len(r.json['role_assignments']) @@ -852,13 +926,13 @@ class _DomainUserTests(object): { 'user_id': domain_assignments['user_id'], 'domain_id': self.domain_id, - 'role_id': domain_assignments['role_id'] + 'role_id': domain_assignments['role_id'], }, { 'user_id': domain_assignments['user_id'], 'project_id': domain_assignments['project_id'], - 'role_id': domain_assignments['role_id'] - } + 'role_id': domain_assignments['role_id'], + }, ] user_id = domain_assignments['user_id'] @@ -866,7 +940,7 @@ class _DomainUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?user.id=%s' % user_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -881,13 +955,13 @@ class _DomainUserTests(object): { 'group_id': domain_assignments['group_id'], 'domain_id': self.domain_id, - 'role_id': domain_assignments['role_id'] + 'role_id': domain_assignments['role_id'], }, { 'group_id': domain_assignments['group_id'], 'project_id': domain_assignments['project_id'], - 'role_id': domain_assignments['role_id'] - } + 'role_id': domain_assignments['role_id'], + }, ] group_id = domain_assignments['group_id'] @@ -895,7 +969,7 @@ class _DomainUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?group.id=%s' % group_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -908,8 +982,7 @@ class _DomainUserTests(object): with self.test_client() as c: r = c.get( - '/v3/role_assignments?scope.system=all', - headers=self.headers + '/v3/role_assignments?scope.system=all', 
headers=self.headers ) self.assertEqual(0, len(r.json['role_assignments'])) @@ -919,7 +992,7 @@ class _DomainUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.domain.id=%s' % domain, - headers=self.headers + headers=self.headers, ) self.assertEqual([], r.json['role_assignments']) @@ -934,7 +1007,7 @@ class _DomainUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?scope.project.id=%s' % project_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(0, len(r.json['role_assignments'])) @@ -950,7 +1023,7 @@ class _DomainUserTests(object): with self.test_client() as c: r = c.get( '/v3/role_assignments?user.id=%s' % user_id, - headers=self.headers + headers=self.headers, ) self.assertEqual(0, len(r.json['role_assignments'])) @@ -978,36 +1051,40 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id, - parent_id=domain_assignments['project_id']) + unit.new_project_ref( + domain_id=self.domain_id, + parent_id=domain_assignments['project_id'], + ), ) PROVIDERS.assignment_api.create_grant( assignments['role_id'], user_id=user['id'], - project_id=project['id'] + project_id=project['id'], ) expected = [ { 'user_id': domain_assignments['user_id'], 'project_id': domain_assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'group_id': domain_assignments['group_id'], 'project_id': domain_assignments['project_id'], - 'role_id': assignments['role_id'] + 'role_id': assignments['role_id'], }, { 'user_id': user['id'], 'project_id': project['id'], - 'role_id': assignments['role_id'] - } + 'role_id': assignments['role_id'], + }, ] with self.test_client() as c: r = c.get( - ('/v3/role_assignments?scope.project.id=%s&include_subtree' % - domain_assignments['project_id']), - headers=self.headers + ( + '/v3/role_assignments?scope.project.id=%s&include_subtree' + % 
domain_assignments['project_id'] + ), + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -1018,10 +1095,12 @@ class _DomainUserTests(object): assignments = self._setup_test_role_assignments() with self.test_client() as c: c.get( - ('/v3/role_assignments?scope.project.id=%s&include_subtree' % - assignments['project_id']), + ( + '/v3/role_assignments?scope.project.id=%s&include_subtree' + % assignments['project_id'] + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -1030,8 +1109,9 @@ class _ProjectUserTests(object): def test_user_cannot_list_all_assignments_in_their_project(self): with self.test_client() as c: c.get( - '/v3/role_assignments', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/role_assignments', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_role_assignments_by_user_of_project(self): @@ -1042,7 +1122,7 @@ class _ProjectUserTests(object): c.get( '/v3/role_assignments?user.id=%s' % user_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_role_assignments_by_group_of_project(self): @@ -1053,7 +1133,7 @@ class _ProjectUserTests(object): c.get( '/v3/role_assignments?group.id=%s' % group_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_role_assignments_by_system(self): @@ -1061,30 +1141,27 @@ class _ProjectUserTests(object): c.get( '/v3/role_assignments?scope.system=all', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_role_assignments_by_domain(self): with self.test_client() as c: c.get( - 
'/v3/role_assignments?scope.domain.id=%s' - % self.domain_id, + '/v3/role_assignments?scope.domain.id=%s' % self.domain_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_role_assignments_by_other_project(self): project1 = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) with self.test_client() as c: c.get( - '/v3/role_assignments?scope.project.id=%s' - % project1, + '/v3/role_assignments?scope.project.id=%s' % project1, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_role_assignments_by_other_project_user(self): @@ -1099,7 +1176,7 @@ class _ProjectUserTests(object): c.get( '/v3/role_assignments?user.id=%s' % user_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_role_assignments_by_other_project_group(self): @@ -1114,7 +1191,7 @@ class _ProjectUserTests(object): c.get( '/v3/role_assignments?group.id=%s' % group_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -1125,27 +1202,32 @@ class _ProjectReaderMemberTests(object): ) project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id, - parent_id=self.project_id) + unit.new_project_ref( + domain_id=self.domain_id, parent_id=self.project_id + ), ) PROVIDERS.assignment_api.create_grant( self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + project_id=project['id'], ) with self.test_client() as c: c.get( - ('/v3/role_assignments?scope.project.id=%s&include_subtree' % - self.project_id), + ( + '/v3/role_assignments?scope.project.id=%s&include_subtree' + % 
self.project_id + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _SystemUserTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _AssignmentTestUtilities, + _SystemUserTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -1156,9 +1238,7 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) @@ -1167,13 +1247,14 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, { 'user_id': self.user_id, 'system': 'all', - 'role_id': self.bootstrapper.reader_role_id + 'role_id': self.bootstrapper.reader_role_id, } ] auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -1184,10 +1265,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _SystemUserTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _AssignmentTestUtilities, + _SystemUserTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -1198,9 +1281,7 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( 
domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) @@ -1209,13 +1290,14 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, { 'user_id': self.user_id, 'system': 'all', - 'role_id': self.bootstrapper.member_role_id + 'role_id': self.bootstrapper.member_role_id, } ] auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -1226,10 +1308,12 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _SystemUserTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _AssignmentTestUtilities, + _SystemUserTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -1241,8 +1325,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, self.expected = [] auth = self.build_authentication_request( - user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + user_id=self.user_id, + password=self.bootstrapper.admin_password, + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -1253,10 +1338,12 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _DomainUserTests): +class DomainReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + 
_AssignmentTestUtilities, + _DomainUserTests, +): def setUp(self): super(DomainReaderTests, self).setUp() @@ -1271,19 +1358,22 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, domain_reader = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_reader)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) self.expected = [ # assignment of the user running the test case { 'user_id': self.user_id, 'domain_id': self.domain_id, - 'role_id': self.bootstrapper.reader_role_id - }] + 'role_id': self.bootstrapper.reader_role_id, + } + ] auth = self.build_authentication_request( - user_id=self.user_id, password=domain_reader['password'], + user_id=self.user_id, + password=domain_reader['password'], domain_id=self.domain_id, ) @@ -1295,10 +1385,12 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _DomainUserTests): +class DomainMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _AssignmentTestUtilities, + _DomainUserTests, +): def setUp(self): super(DomainMemberTests, self).setUp() @@ -1313,20 +1405,23 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, domain_user = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) self.expected = [ # assignment of the user running the test case { 'user_id': self.user_id, 'domain_id': self.domain_id, - 'role_id': 
self.bootstrapper.member_role_id - }] + 'role_id': self.bootstrapper.member_role_id, + } + ] auth = self.build_authentication_request( - user_id=self.user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.user_id, + password=domain_user['password'], + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -1337,10 +1432,12 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _DomainUserTests): +class DomainAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _AssignmentTestUtilities, + _DomainUserTests, +): def _override_policy(self): # TODO(lbragstad): Remove this once the deprecated policies in @@ -1358,7 +1455,7 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ), 'identity:list_role_assignments_for_tree': ( rp.SYSTEM_READER_OR_PROJECT_DOMAIN_READER_OR_PROJECT_ADMIN - ) + ), } f.write(jsonutils.dumps(overridden_policies)) @@ -1382,19 +1479,22 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) self.expected = [ # assignment of the user running the test case { 'user_id': self.user_id, 'domain_id': self.domain_id, - 'role_id': self.bootstrapper.admin_role_id - }] + 'role_id': self.bootstrapper.admin_role_id, + } + ] auth = self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], + user_id=self.user_id, + password=domain_admin['password'], domain_id=self.domain_id, ) @@ -1406,11 +1506,13 @@ class 
DomainAdminTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _ProjectUserTests, - _ProjectReaderMemberTests): +class ProjectReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _AssignmentTestUtilities, + _ProjectUserTests, + _ProjectReaderMemberTests, +): def setUp(self): super(ProjectReaderTests, self).setUp() @@ -1430,8 +1532,9 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, project_reader = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(project_reader)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=self.project_id, ) self.expected = [ @@ -1439,11 +1542,13 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, { 'user_id': self.user_id, 'project_id': self.project_id, - 'role_id': self.bootstrapper.reader_role_id - }] + 'role_id': self.bootstrapper.reader_role_id, + } + ] auth = self.build_authentication_request( - user_id=self.user_id, password=project_reader['password'], + user_id=self.user_id, + password=project_reader['password'], project_id=self.project_id, ) @@ -1455,11 +1560,13 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _ProjectUserTests, - _ProjectReaderMemberTests): +class ProjectMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _AssignmentTestUtilities, + _ProjectUserTests, + _ProjectReaderMemberTests, +): def setUp(self): super(ProjectMemberTests, self).setUp() @@ -1479,8 +1586,9 @@ class 
ProjectMemberTests(base_classes.TestCaseWithBootstrap, project_member = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(project_member)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) self.expected = [ @@ -1488,11 +1596,13 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, { 'user_id': self.user_id, 'project_id': self.project_id, - 'role_id': self.bootstrapper.member_role_id - }] + 'role_id': self.bootstrapper.member_role_id, + } + ] auth = self.build_authentication_request( - user_id=self.user_id, password=project_member['password'], + user_id=self.user_id, + password=project_member['password'], project_id=self.project_id, ) @@ -1504,10 +1614,12 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _AssignmentTestUtilities, - _ProjectUserTests): +class ProjectAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _AssignmentTestUtilities, + _ProjectUserTests, +): def setUp(self): super(ProjectAdminTests, self).setUp() @@ -1534,8 +1646,9 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, self.project_id = project['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + project_id=self.project_id, ) self.expected = [ @@ -1543,13 +1656,14 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, { 'user_id': self.user_id, 'project_id': self.project_id, - 'role_id': self.bootstrapper.admin_role_id - }] + 'role_id': self.bootstrapper.admin_role_id, + } + ] auth = self.build_authentication_request( 
user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -1575,7 +1689,7 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, ), 'identity:list_role_assignments_for_tree': ( rp.SYSTEM_READER_OR_PROJECT_DOMAIN_READER_OR_PROJECT_ADMIN - ) + ), } f.write(jsonutils.dumps(overridden_policies)) @@ -1585,25 +1699,30 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id, - parent_id=self.project_id) + unit.new_project_ref( + domain_id=self.domain_id, parent_id=self.project_id + ), ) PROVIDERS.assignment_api.create_grant( self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + project_id=project['id'], ) expected = copy.copy(self.expected) - expected.append({ - 'project_id': project['id'], - 'user_id': user['id'], - 'role_id': self.bootstrapper.reader_role_id - }) + expected.append( + { + 'project_id': project['id'], + 'user_id': user['id'], + 'role_id': self.bootstrapper.reader_role_id, + } + ) with self.test_client() as c: r = c.get( - ('/v3/role_assignments?scope.project.id=%s&include_subtree' % - self.project_id), - headers=self.headers + ( + '/v3/role_assignments?scope.project.id=%s&include_subtree' + % self.project_id + ), + headers=self.headers, ) self.assertEqual(len(expected), len(r.json['role_assignments'])) actual = self._extract_role_assignments_from_response_body(r) @@ -1615,18 +1734,19 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, unit.new_user_ref(domain_id=self.domain_id) ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) PROVIDERS.assignment_api.create_grant( self.bootstrapper.reader_role_id, 
user_id=user['id'], - project_id=project['id'] + project_id=project['id'], ) with self.test_client() as c: c.get( - ('/v3/role_assignments?scope.project.id=%s&include_subtree' % - project['id']), + ( + '/v3/role_assignments?scope.project.id=%s&include_subtree' + % project['id'] + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) diff --git a/keystone/tests/protection/v3/test_consumer.py b/keystone/tests/protection/v3/test_consumer.py index 223bccaebd..e7c3373fc0 100644 --- a/keystone/tests/protection/v3/test_consumer.py +++ b/keystone/tests/protection/v3/test_consumer.py @@ -29,51 +29,55 @@ class _SystemUserOauth1ConsumerTests(object): """Common default functionality for all system users.""" def test_user_can_get_consumer(self): - ref = PROVIDERS.oauth_api.create_consumer( - {'id': uuid.uuid4().hex}) + ref = PROVIDERS.oauth_api.create_consumer({'id': uuid.uuid4().hex}) with self.test_client() as c: - c.get('/v3/OS-OAUTH1/consumers/%s' % ref['id'], - headers=self.headers) + c.get( + '/v3/OS-OAUTH1/consumers/%s' % ref['id'], headers=self.headers + ) def test_user_can_list_consumers(self): - PROVIDERS.oauth_api.create_consumer( - {'id': uuid.uuid4().hex}) + PROVIDERS.oauth_api.create_consumer({'id': uuid.uuid4().hex}) with self.test_client() as c: - c.get('/v3/OS-OAUTH1/consumers', - headers=self.headers) + c.get('/v3/OS-OAUTH1/consumers', headers=self.headers) class _SystemReaderAndMemberOauth1ConsumerTests(object): def test_user_cannot_create_consumer(self): with self.test_client() as c: - c.post('/v3/OS-OAUTH1/consumers', - json={'consumer': {}}, - expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + c.post( + '/v3/OS-OAUTH1/consumers', + json={'consumer': {}}, + expected_status_code=http.client.FORBIDDEN, + headers=self.headers, + ) def test_user_cannot_update_consumer(self): - ref = PROVIDERS.oauth_api.create_consumer( - {'id': uuid.uuid4().hex}) + ref = 
PROVIDERS.oauth_api.create_consumer({'id': uuid.uuid4().hex}) with self.test_client() as c: - c.patch('/v3/OS-OAUTH1/consumers/%s' % ref['id'], - json={'consumer': {'description': uuid.uuid4().hex}}, - expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + c.patch( + '/v3/OS-OAUTH1/consumers/%s' % ref['id'], + json={'consumer': {'description': uuid.uuid4().hex}}, + expected_status_code=http.client.FORBIDDEN, + headers=self.headers, + ) def test_user_cannot_delete_consumer(self): - ref = PROVIDERS.oauth_api.create_consumer( - {'id': uuid.uuid4().hex}) + ref = PROVIDERS.oauth_api.create_consumer({'id': uuid.uuid4().hex}) with self.test_client() as c: - c.delete('/v3/OS-OAUTH1/consumers/%s' % ref['id'], - expected_status_code=http.client.FORBIDDEN, - headers=self.headers) + c.delete( + '/v3/OS-OAUTH1/consumers/%s' % ref['id'], + expected_status_code=http.client.FORBIDDEN, + headers=self.headers, + ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserOauth1ConsumerTests, - _SystemReaderAndMemberOauth1ConsumerTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserOauth1ConsumerTests, + _SystemReaderAndMemberOauth1ConsumerTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -84,16 +88,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare 
headers @@ -104,10 +107,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserOauth1ConsumerTests, - _SystemReaderAndMemberOauth1ConsumerTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserOauth1ConsumerTests, + _SystemReaderAndMemberOauth1ConsumerTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -118,16 +123,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -138,9 +142,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserOauth1ConsumerTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserOauth1ConsumerTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -154,7 +160,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -166,21 +172,24 @@ class 
SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_can_create_consumer(self): with self.test_client() as c: - c.post('/v3/OS-OAUTH1/consumers', - json={'consumer': {}}, - headers=self.headers) + c.post( + '/v3/OS-OAUTH1/consumers', + json={'consumer': {}}, + headers=self.headers, + ) def test_user_can_update_consumer(self): - ref = PROVIDERS.oauth_api.create_consumer( - {'id': uuid.uuid4().hex}) + ref = PROVIDERS.oauth_api.create_consumer({'id': uuid.uuid4().hex}) with self.test_client() as c: - c.patch('/v3/OS-OAUTH1/consumers/%s' % ref['id'], - json={'consumer': {'description': uuid.uuid4().hex}}, - headers=self.headers) + c.patch( + '/v3/OS-OAUTH1/consumers/%s' % ref['id'], + json={'consumer': {'description': uuid.uuid4().hex}}, + headers=self.headers, + ) def test_user_can_delete_consumer(self): - ref = PROVIDERS.oauth_api.create_consumer( - {'id': uuid.uuid4().hex}) + ref = PROVIDERS.oauth_api.create_consumer({'id': uuid.uuid4().hex}) with self.test_client() as c: - c.delete('/v3/OS-OAUTH1/consumers/%s' % ref['id'], - headers=self.headers) + c.delete( + '/v3/OS-OAUTH1/consumers/%s' % ref['id'], headers=self.headers + ) diff --git a/keystone/tests/protection/v3/test_credentials.py b/keystone/tests/protection/v3/test_credentials.py index 5a1960e38e..0074089790 100644 --- a/keystone/tests/protection/v3/test_credentials.py +++ b/keystone/tests/protection/v3/test_credentials.py @@ -36,7 +36,7 @@ class _UserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'user_id': self.user_id, - 'type': uuid.uuid4().hex + 'type': uuid.uuid4().hex, } } with self.test_client() as c: @@ -48,7 +48,7 @@ class _UserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': self.user_id + 'user_id': self.user_id, } } r = c.post('/v3/credentials', json=create, headers=self.headers) @@ -56,9 +56,7 @@ class _UserCredentialTests(object): path = '/v3/credentials/%s' % credential_id r = c.get(path, 
headers=self.headers) - self.assertEqual( - self.user_id, r.json['credential']['user_id'] - ) + self.assertEqual(self.user_id, r.json['credential']['user_id']) def test_user_can_list_their_credentials(self): with self.test_client() as c: @@ -68,7 +66,7 @@ class _UserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': self.user_id + 'user_id': self.user_id, } } r = c.post( @@ -87,24 +85,20 @@ class _UserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': credential_type, - 'user_id': self.user_id + 'user_id': self.user_id, } } - r = c.post( - '/v3/credentials', json=create, headers=self.headers - ) + r = c.post('/v3/credentials', json=create, headers=self.headers) expected_credential_id = r.json['credential']['id'] create = { 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': self.user_id + 'user_id': self.user_id, } } - r = c.post( - '/v3/credentials', json=create, headers=self.headers - ) + r = c.post('/v3/credentials', json=create, headers=self.headers) path = '/v3/credentials?type=%s' % credential_type r = c.get(path, headers=self.headers) @@ -124,7 +118,7 @@ class _UserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': self.user_id + 'user_id': self.user_id, } } @@ -143,7 +137,7 @@ class _UserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': self.user_id + 'user_id': self.user_id, } } r = c.post('/v3/credentials', json=create, headers=self.headers) @@ -165,12 +159,14 @@ class _ProjectUsersTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - 
user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -182,7 +178,7 @@ class _ProjectUsersTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -191,15 +187,17 @@ class _ProjectUsersTests(object): with self.test_client() as c: path = '/v3/credentials/%s' % credential_id c.get( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_non_existant_credential_forbidden(self): with self.test_client() as c: c.get( - '/v3/credentials/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/credentials/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_credentials_for_other_users(self): @@ -211,12 +209,14 @@ class _ProjectUsersTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -228,7 +228,7 @@ class _ProjectUsersTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } c.post('/v3/credentials', json=create, headers=headers) @@ -247,12 +247,14 @@ class _ProjectUsersTests(object): ) project = 
PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) credential_type = uuid.uuid4().hex @@ -265,7 +267,7 @@ class _ProjectUsersTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': credential_type, - 'user_id': user['id'] + 'user_id': user['id'], } } c.post('/v3/credentials', json=create, headers=headers) @@ -284,12 +286,14 @@ class _ProjectUsersTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -303,7 +307,7 @@ class _ProjectUsersTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -323,12 +327,14 @@ class _ProjectUsersTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + 
user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -340,7 +346,7 @@ class _ProjectUsersTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -350,8 +356,10 @@ class _ProjectUsersTests(object): update = {'credential': {'blob': uuid.uuid4().hex}} path = '/v3/credentials/%s' % credential_id c.patch( - path, json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existant_credential_forbidden(self): @@ -359,9 +367,10 @@ class _ProjectUsersTests(object): update = {'credential': {'blob': uuid.uuid4().hex}} c.patch( - '/v3/credentials/%s' % uuid.uuid4().hex, json=update, + '/v3/credentials/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_credentials_for_other_users(self): @@ -374,12 +383,14 @@ class _ProjectUsersTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } c.post( - '/v3/credentials', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/credentials', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_credentials_for_others(self): @@ -391,12 +402,14 @@ class _ProjectUsersTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( 
- user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -408,7 +421,7 @@ class _ProjectUsersTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -417,15 +430,17 @@ class _ProjectUsersTests(object): with self.test_client() as c: path = '/v3/credentials/%s' % credential_id c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existant_credential_forbidden(self): with self.test_client() as c: c.delete( - '/v3/credentials/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/credentials/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -441,12 +456,14 @@ class _SystemUserCredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -458,7 +475,7 @@ class _SystemUserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -473,8 +490,9 @@ class _SystemUserCredentialTests(object): def test_user_cannot_get_non_existant_credential_not_found(self): with 
self.test_client() as c: c.get( - '/v3/credentials/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + '/v3/credentials/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, ) def test_user_can_filter_credentials_by_type_for_others(self): @@ -486,12 +504,14 @@ class _SystemUserCredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) credential_type = uuid.uuid4().hex @@ -504,7 +524,7 @@ class _SystemUserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': credential_type, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -514,7 +534,7 @@ class _SystemUserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } c.post('/v3/credentials', json=create, headers=headers) @@ -535,12 +555,14 @@ class _SystemUserCredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -554,7 +576,7 @@ class 
_SystemUserCredentialTests(object): 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -569,10 +591,12 @@ class _SystemUserCredentialTests(object): self.assertEqual(user['id'], credential['user_id']) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - _SystemUserCredentialTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + _SystemUserCredentialTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -583,16 +607,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -612,12 +635,14 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } c.post( - '/v3/credentials', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/credentials', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_credentials_for_others(self): @@ -629,12 +654,14 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) 
PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -646,7 +673,7 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -656,8 +683,10 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, update = {'credential': {'blob': uuid.uuid4().hex}} path = '/v3/credentials/%s' % credential_id c.patch( - path, json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existant_credential_forbidden(self): @@ -665,9 +694,10 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, update = {'credential': {'blob': uuid.uuid4().hex}} c.patch( - '/v3/credentials/%s' % uuid.uuid4().hex, json=update, + '/v3/credentials/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_credentials_for_others(self): @@ -679,12 +709,14 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - 
user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -696,7 +728,7 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -705,22 +737,26 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: path = '/v3/credentials/%s' % credential_id c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existant_credential_forbidden(self): with self.test_client() as c: c.delete( - '/v3/credentials/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/credentials/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - _SystemUserCredentialTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + _SystemUserCredentialTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -731,16 +767,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + 
user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -760,12 +795,14 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } c.post( - '/v3/credentials', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/credentials', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_credentials_for_others(self): @@ -777,12 +814,14 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -794,7 +833,7 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -804,8 +843,10 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, update = {'credential': {'blob': uuid.uuid4().hex}} path = '/v3/credentials/%s' % credential_id c.patch( - path, json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existant_credential_forbidden(self): @@ -813,9 +854,10 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, 
update = {'credential': {'blob': uuid.uuid4().hex}} c.patch( - '/v3/credentials/%s' % uuid.uuid4().hex, json=update, + '/v3/credentials/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_credentials_for_others(self): @@ -827,12 +869,14 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -844,7 +888,7 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -853,22 +897,26 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: path = '/v3/credentials/%s' % credential_id c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existant_credential_forbidden(self): with self.test_client() as c: c.delete( - '/v3/credentials/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/credentials/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - 
_SystemUserCredentialTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + _SystemUserCredentialTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -882,7 +930,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -902,7 +950,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } c.post('/v3/credentials', json=create, headers=self.headers) @@ -916,12 +964,14 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -933,7 +983,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -952,9 +1002,10 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, update = {'credential': {'blob': uuid.uuid4().hex}} c.patch( - '/v3/credentials/%s' % uuid.uuid4().hex, json=update, + '/v3/credentials/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + 
expected_status_code=http.client.NOT_FOUND, ) def test_user_can_delete_credentials_for_others(self): @@ -966,12 +1017,14 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: @@ -983,7 +1036,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 'credential': { 'blob': uuid.uuid4().hex, 'type': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } } r = c.post('/v3/credentials', json=create, headers=headers) @@ -996,15 +1049,18 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_cannot_delete_non_existant_credential_not_found(self): with self.test_client() as c: c.delete( - '/v3/credentials/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + '/v3/credentials/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, ) -class ProjectReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - _ProjectUsersTests): +class ProjectReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + _ProjectUsersTests, +): def setUp(self): super(ProjectReaderTests, self).setUp() @@ -1015,9 +1071,7 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, project_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - project_reader - )['id'] + self.user_id = 
PROVIDERS.identity_api.create_user(project_reader)['id'] project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id ) @@ -1025,14 +1079,15 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=project_reader['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -1043,10 +1098,12 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - _ProjectUsersTests): +class ProjectMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + _ProjectUsersTests, +): def setUp(self): super(ProjectMemberTests, self).setUp() @@ -1057,9 +1114,7 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, project_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - project_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(project_member)['id'] project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id ) @@ -1067,14 +1122,15 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=project_member['password'], - 
project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -1085,10 +1141,12 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - _ProjectUsersTests): +class ProjectAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + _ProjectUsersTests, +): def setUp(self): super(ProjectAdminTests, self).setUp() @@ -1110,7 +1168,7 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -1131,21 +1189,21 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, # broken behavior with better scope checking. 
with open(self.policy_file_name, 'w') as f: overridden_policies = { - 'identity:get_credential': - bp.ADMIN_OR_SYSTEM_READER_OR_CRED_OWNER, - 'identity:list_credentials': - bp.ADMIN_OR_SYSTEM_READER_OR_CRED_OWNER, + 'identity:get_credential': bp.ADMIN_OR_SYSTEM_READER_OR_CRED_OWNER, + 'identity:list_credentials': bp.ADMIN_OR_SYSTEM_READER_OR_CRED_OWNER, 'identity:create_credential': bp.ADMIN_OR_CRED_OWNER, 'identity:update_credential': bp.ADMIN_OR_CRED_OWNER, - 'identity:delete_credential': bp.ADMIN_OR_CRED_OWNER + 'identity:delete_credential': bp.ADMIN_OR_CRED_OWNER, } f.write(jsonutils.dumps(overridden_policies)) -class ProjectReaderTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - _ProjectUsersTests): +class ProjectReaderTestsEnforceScopeFalse( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + _ProjectUsersTests, +): def setUp(self): super(ProjectReaderTestsEnforceScopeFalse, self).setUp() @@ -1156,9 +1214,7 @@ class ProjectReaderTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, project_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - project_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(project_reader)['id'] project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id ) @@ -1166,14 +1222,15 @@ class ProjectReaderTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=project_reader['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare 
headers @@ -1184,10 +1241,12 @@ class ProjectReaderTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectMemberTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - _ProjectUsersTests): +class ProjectMemberTestsEnforceScopeFalse( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + _ProjectUsersTests, +): def setUp(self): super(ProjectMemberTestsEnforceScopeFalse, self).setUp() @@ -1198,9 +1257,7 @@ class ProjectMemberTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, project_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - project_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(project_member)['id'] project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id ) @@ -1208,14 +1265,15 @@ class ProjectMemberTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=project_member['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -1226,10 +1284,12 @@ class ProjectMemberTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectAdminTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserCredentialTests, - _SystemUserCredentialTests): +class ProjectAdminTestsEnforceScopeFalse( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserCredentialTests, + 
_SystemUserCredentialTests, +): def setUp(self): super(ProjectAdminTestsEnforceScopeFalse, self).setUp() @@ -1243,7 +1303,7 @@ class ProjectAdminTestsEnforceScopeFalse(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_domain_config.py b/keystone/tests/protection/v3/test_domain_config.py index 1f25aa97d6..b4b264b54d 100644 --- a/keystone/tests/protection/v3/test_domain_config.py +++ b/keystone/tests/protection/v3/test_domain_config.py @@ -32,29 +32,36 @@ class _SystemDomainAndProjectUserDomainConfigTests(object): password_regex = uuid.uuid4().hex password_regex_description = uuid.uuid4().hex self.config_fixture.config( - group='security_compliance', - password_regex=password_regex + group='security_compliance', password_regex=password_regex ) self.config_fixture.config( group='security_compliance', - password_regex_description=password_regex_description + password_regex_description=password_regex_description, ) with self.test_client() as c: - c.get('/v3/domains/%s/config/security_compliance' - % CONF.identity.default_domain_id, headers=self.headers) + c.get( + '/v3/domains/%s/config/security_compliance' + % CONF.identity.default_domain_id, + headers=self.headers, + ) def test_user_can_get_security_compliance_domain_config_option(self): password_regex_description = uuid.uuid4().hex self.config_fixture.config( group='security_compliance', - password_regex_description=password_regex_description + password_regex_description=password_regex_description, ) with self.test_client() as c: - c.get('/v3/domains/%s/config/security_compliance' - '/password_regex_description' - % CONF.identity.default_domain_id, headers=self.headers) + c.get( + '/v3/domains/%s/config/security_compliance' + 
'/password_regex_description' + % CONF.identity.default_domain_id, + headers=self.headers, + ) - def test_can_get_security_compliance_config_with_user_from_other_domain(self): # noqa: E501 + def test_can_get_security_compliance_config_with_user_from_other_domain( + self, + ): # noqa: E501 domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) @@ -70,24 +77,21 @@ class _SystemDomainAndProjectUserDomainConfigTests(object): # Give the new user a non-admin role on the project PROVIDERS.assignment_api.add_role_to_user_and_project( - user['id'], - project['id'], - role['id'] + user['id'], project['id'], role['id'] ) password_regex = uuid.uuid4().hex password_regex_description = uuid.uuid4().hex group = 'security_compliance' + self.config_fixture.config(group=group, password_regex=password_regex) self.config_fixture.config( - group=group, - password_regex=password_regex - ) - self.config_fixture.config( - group=group, - password_regex_description=password_regex_description + group=group, password_regex_description=password_regex_description ) with self.test_client() as c: - c.get('/v3/domains/%s/config/security_compliance' - % CONF.identity.default_domain_id, headers=self.headers) + c.get( + '/v3/domains/%s/config/security_compliance' + % CONF.identity.default_domain_id, + headers=self.headers, + ) class _SystemUserDomainConfigTests(object): @@ -97,40 +101,49 @@ class _SystemUserDomainConfigTests(object): uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.get('/v3/domains/%s/config' - % domain['id'], headers=self.headers) + c.get('/v3/domains/%s/config' % domain['id'], headers=self.headers) def test_user_can_get_domain_group_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - 
domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.get('/v3/domains/%s/config/ldap' - % domain['id'], headers=self.headers) + c.get( + '/v3/domains/%s/config/ldap' % domain['id'], + headers=self.headers, + ) def test_user_can_get_config_by_group_invalid_domain(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) invalid_domain_id = uuid.uuid4().hex with self.test_client() as c: - c.get('/v3/domains/%s/config/ldap' - % invalid_domain_id, headers=self.headers, - expected_status_code=http.client.NOT_FOUND) + c.get( + '/v3/domains/%s/config/ldap' % invalid_domain_id, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, + ) def test_user_can_get_non_existent_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) with self.test_client() as c: - c.get('/v3/domains/%s/config' % domain['id'], headers=self.headers, - expected_status_code=http.client.NOT_FOUND) + c.get( + '/v3/domains/%s/config' % domain['id'], + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, + ) def test_user_can_get_non_existent_config_group_invalid_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -140,19 +153,24 @@ class _SystemUserDomainConfigTests(object): PROVIDERS.domain_config_api.create_config(domain['id'], config) invalid_domain_id = uuid.uuid4().hex with self.test_client() as c: - c.get('/v3/domains/%s/config/ldap' - % invalid_domain_id, headers=self.headers, - expected_status_code=http.client.NOT_FOUND) + c.get( + '/v3/domains/%s/config/ldap' % invalid_domain_id, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, + ) def test_user_can_get_domain_config_option(self): domain = PROVIDERS.resource_api.create_domain( 
uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.get('/v3/domains/%s/config/ldap/url' - % domain['id'], headers=self.headers) + c.get( + '/v3/domains/%s/config/ldap/url' % domain['id'], + headers=self.headers, + ) def test_user_can_get_non_existent_config_option(self): domain = PROVIDERS.resource_api.create_domain( @@ -161,9 +179,11 @@ class _SystemUserDomainConfigTests(object): config = {'ldap': {'url': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config(domain['id'], config) with self.test_client() as c: - c.get('/v3/domains/%s/config/ldap/user_tree_dn' - % domain['id'], headers=self.headers, - expected_status_code=http.client.NOT_FOUND) + c.get( + '/v3/domains/%s/config/ldap/user_tree_dn' % domain['id'], + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, + ) def test_user_can_get_non_existent_config_option_invalid_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -173,55 +193,63 @@ class _SystemUserDomainConfigTests(object): PROVIDERS.domain_config_api.create_config(domain['id'], config) invalid_domain_id = uuid.uuid4().hex with self.test_client() as c: - c.get('/v3/domains/%s/config/ldap/user_tree_dn' - % invalid_domain_id, headers=self.headers, - expected_status_code=http.client.NOT_FOUND) + c.get( + '/v3/domains/%s/config/ldap/user_tree_dn' % invalid_domain_id, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, + ) def test_user_can_get_security_compliance_domain_config(self): # Set the security compliance configuration options password_regex = uuid.uuid4().hex password_regex_description = uuid.uuid4().hex self.config_fixture.config( - group='security_compliance', - password_regex=password_regex + group='security_compliance', password_regex=password_regex ) self.config_fixture.config( group='security_compliance', - 
password_regex_description=password_regex_description + password_regex_description=password_regex_description, ) with self.test_client() as c: - c.get('/v3/domains/%s/config/security_compliance' - % CONF.identity.default_domain_id, headers=self.headers) + c.get( + '/v3/domains/%s/config/security_compliance' + % CONF.identity.default_domain_id, + headers=self.headers, + ) def test_user_can_get_security_compliance_domain_config_option(self): password_regex_description = uuid.uuid4().hex self.config_fixture.config( group='security_compliance', - password_regex_description=password_regex_description + password_regex_description=password_regex_description, ) with self.test_client() as c: - c.get('/v3/domains/%s/config/security_compliance' - '/password_regex_description' - % CONF.identity.default_domain_id, headers=self.headers) + c.get( + '/v3/domains/%s/config/security_compliance' + '/password_regex_description' + % CONF.identity.default_domain_id, + headers=self.headers, + ) - def test_can_get_security_compliance_config_with_user_from_other_domain(self): # noqa: E501 + def test_can_get_security_compliance_config_with_user_from_other_domain( + self, + ): # noqa: E501 domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) password_regex = uuid.uuid4().hex password_regex_description = uuid.uuid4().hex group = 'security_compliance' + self.config_fixture.config(group=group, password_regex=password_regex) self.config_fixture.config( - group=group, - password_regex=password_regex - ) - self.config_fixture.config( - group=group, - password_regex_description=password_regex_description + group=group, password_regex_description=password_regex_description ) with self.test_client() as c: - c.get('/v3/domains/%s/config/security_compliance' - % CONF.identity.default_domain_id, headers=self.headers) + c.get( + '/v3/domains/%s/config/security_compliance' + % CONF.identity.default_domain_id, + headers=self.headers, + ) def 
test_user_can_get_domain_config_default(self): with self.test_client() as c: @@ -243,39 +271,49 @@ class _SystemReaderMemberDomainAndProjectUserDomainConfigTests(object): uuid.uuid4().hex, unit.new_domain_ref() ) with self.test_client() as c: - c.put('/v3/domains/%s/config' - % domain['id'], - json={'config': unit.new_domain_config_ref()}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.put( + '/v3/domains/%s/config' % domain['id'], + json={'config': unit.new_domain_config_ref()}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_update_domain_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) - new_config = {'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + domain['id'], unit.new_domain_config_ref() + ) + new_config = { + 'ldap': {'url': uuid.uuid4().hex}, + 'identity': {'driver': uuid.uuid4().hex}, + } with self.test_client() as c: - c.patch('/v3/domains/%s/config' - % domain['id'], json={'config': new_config}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.patch( + '/v3/domains/%s/config' % domain['id'], + json={'config': new_config}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_update_domain_group_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) - new_config = {'ldap': {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} + domain['id'], unit.new_domain_config_ref() + ) + new_config = { + 'ldap': {'url': uuid.uuid4().hex, 'user_filter': uuid.uuid4().hex} + } with self.test_client() as c: - c.patch('/v3/domains/%s/config/ldap' - % domain['id'], json={'config': new_config}, - headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN) + c.patch( + '/v3/domains/%s/config/ldap' % domain['id'], + json={'config': new_config}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_update_domain_config_option(self): domain = PROVIDERS.resource_api.create_domain( @@ -283,46 +321,57 @@ class _SystemReaderMemberDomainAndProjectUserDomainConfigTests(object): ) new_config = {'url': uuid.uuid4().hex} PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.patch('/v3/domains/%s/config/ldap/url' - % domain['id'], - json={'config': new_config}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.patch( + '/v3/domains/%s/config/ldap/url' % domain['id'], + json={'config': new_config}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_delete_domain_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.delete('/v3/domains/%s/config' % domain['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/domains/%s/config' % domain['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_delete_domain_group_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.delete('/v3/domains/%s/config/ldap' - % domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/domains/%s/config/ldap' % domain['id'], + 
headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_delete_domain_config_option(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.delete('/v3/domains/%s/config/ldap/url' - % domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/domains/%s/config/ldap/url' % domain['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) class _DomainAndProjectUserDomainConfigTests(object): @@ -332,64 +381,86 @@ class _DomainAndProjectUserDomainConfigTests(object): uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.get('/v3/domains/%s/config' - % domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/domains/%s/config' % domain['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_domain_group_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.get('/v3/domains/%s/config/ldap' - % domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/domains/%s/config/ldap' % domain['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_non_existant_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) with self.test_client() as c: - c.get('/v3/domains/%s/config' 
% domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/domains/%s/config' % domain['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_domain_config_option(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.get('/v3/domains/%s/config/ldap/url' - % domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/domains/%s/config/ldap/url' % domain['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_domain_config_default(self): with self.test_client() as c: - c.get('/v3/domains/config/default', headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/domains/config/default', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_domain_group_config_default(self): with self.test_client() as c: - c.get('/v3/domains/config/ldap/default', headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/domains/config/ldap/default', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_domain_config_option_default(self): with self.test_client() as c: - c.get('/v3/domains/config/ldap/url/default', headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/domains/config/ldap/url/default', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) class SystemReaderTests( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainConfigTests, - _SystemReaderMemberDomainAndProjectUserDomainConfigTests, - _SystemDomainAndProjectUserDomainConfigTests): + 
base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainConfigTests, + _SystemReaderMemberDomainAndProjectUserDomainConfigTests, + _SystemDomainAndProjectUserDomainConfigTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -400,16 +471,15 @@ class SystemReaderTests( system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -421,11 +491,12 @@ class SystemReaderTests( class SystemMemberTests( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainConfigTests, - _SystemReaderMemberDomainAndProjectUserDomainConfigTests, - _SystemDomainAndProjectUserDomainConfigTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainConfigTests, + _SystemReaderMemberDomainAndProjectUserDomainConfigTests, + _SystemDomainAndProjectUserDomainConfigTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -436,16 +507,15 @@ class SystemMemberTests( system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + 
password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -456,10 +526,12 @@ class SystemMemberTests( self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainConfigTests, - _SystemDomainAndProjectUserDomainConfigTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainConfigTests, + _SystemDomainAndProjectUserDomainConfigTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -473,7 +545,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -488,46 +560,57 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, uuid.uuid4().hex, unit.new_domain_ref() ) with self.test_client() as c: - c.put('/v3/domains/%s/config' - % domain['id'], - json={'config': unit.new_domain_config_ref()}, - headers=self.headers, - expected_status_code=http.client.CREATED) + c.put( + '/v3/domains/%s/config' % domain['id'], + json={'config': unit.new_domain_config_ref()}, + headers=self.headers, + expected_status_code=http.client.CREATED, + ) def test_user_cannot_create_invalid_domain_config(self): invalid_domain_id = uuid.uuid4().hex with self.test_client() as c: - c.put('/v3/domains/%s/config' - % invalid_domain_id, - json={'config': unit.new_domain_config_ref()}, - headers=self.headers, - expected_status_code=http.client.NOT_FOUND) + c.put( + '/v3/domains/%s/config' % invalid_domain_id, + json={'config': unit.new_domain_config_ref()}, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, + ) def test_user_can_update_domain_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, 
unit.new_domain_ref() ) - new_config = {'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + new_config = { + 'ldap': {'url': uuid.uuid4().hex}, + 'identity': {'driver': uuid.uuid4().hex}, + } PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.patch('/v3/domains/%s/config' - % domain['id'], json={'config': new_config}, - headers=self.headers) + c.patch( + '/v3/domains/%s/config' % domain['id'], + json={'config': new_config}, + headers=self.headers, + ) def test_user_can_update_domain_group_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) - new_config = {'ldap': {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} + new_config = { + 'ldap': {'url': uuid.uuid4().hex, 'user_filter': uuid.uuid4().hex} + } PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.patch('/v3/domains/%s/config/ldap' - % domain['id'], json={'config': new_config}, - headers=self.headers) + c.patch( + '/v3/domains/%s/config/ldap' % domain['id'], + json={'config': new_config}, + headers=self.headers, + ) def test_user_can_update_domain_config_option(self): domain = PROVIDERS.resource_api.create_domain( @@ -535,61 +618,76 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) new_config = {'url': uuid.uuid4().hex} PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.patch('/v3/domains/%s/config/ldap/url' - % domain['id'], json={'config': new_config}, - headers=self.headers) + c.patch( + '/v3/domains/%s/config/ldap/url' % domain['id'], + json={'config': new_config}, + headers=self.headers, + ) def test_user_can_delete_domain_config(self): domain = 
PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.delete('/v3/domains/%s/config' % domain['id'], - headers=self.headers) + c.delete( + '/v3/domains/%s/config' % domain['id'], headers=self.headers + ) def test_user_can_delete_domain_group_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.delete('/v3/domains/%s/config/ldap' - % domain['id'], headers=self.headers) + c.delete( + '/v3/domains/%s/config/ldap' % domain['id'], + headers=self.headers, + ) def test_user_can_delete_domain_config_option(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) with self.test_client() as c: - c.delete('/v3/domains/%s/config/ldap/url' - % domain['id'], headers=self.headers) + c.delete( + '/v3/domains/%s/config/ldap/url' % domain['id'], + headers=self.headers, + ) def test_user_cannot_delete_invalid_domain_config(self): domain = PROVIDERS.resource_api.create_domain( uuid.uuid4().hex, unit.new_domain_ref() ) PROVIDERS.domain_config_api.create_config( - domain['id'], unit.new_domain_config_ref()) + domain['id'], unit.new_domain_config_ref() + ) invalid_domain_id = uuid.uuid4().hex with self.test_client() as c: - c.delete('/v3/domains/%s/config' % invalid_domain_id, - headers=self.headers, - expected_status_code=http.client.NOT_FOUND) + c.delete( + '/v3/domains/%s/config' % invalid_domain_id, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, + ) class DomainUserTests( - 
base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemDomainAndProjectUserDomainConfigTests, - _DomainAndProjectUserDomainConfigTests, - _SystemReaderMemberDomainAndProjectUserDomainConfigTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemDomainAndProjectUserDomainConfigTests, + _DomainAndProjectUserDomainConfigTests, + _SystemReaderMemberDomainAndProjectUserDomainConfigTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -604,14 +702,15 @@ class DomainUserTests( domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -623,11 +722,12 @@ class DomainUserTests( class ProjectUserTests( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemDomainAndProjectUserDomainConfigTests, - _DomainAndProjectUserDomainConfigTests, - _SystemReaderMemberDomainAndProjectUserDomainConfigTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemDomainAndProjectUserDomainConfigTests, + _DomainAndProjectUserDomainConfigTests, + _SystemReaderMemberDomainAndProjectUserDomainConfigTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -639,7 +739,7 @@ class ProjectUserTests( auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -651,11 +751,12 @@ class 
ProjectUserTests( class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemDomainAndProjectUserDomainConfigTests, - _DomainAndProjectUserDomainConfigTests, - _SystemReaderMemberDomainAndProjectUserDomainConfigTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemDomainAndProjectUserDomainConfigTests, + _DomainAndProjectUserDomainConfigTests, + _SystemReaderMemberDomainAndProjectUserDomainConfigTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -677,14 +778,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_domain_roles.py b/keystone/tests/protection/v3/test_domain_roles.py index 6be109b930..45c7dd65ee 100644 --- a/keystone/tests/protection/v3/test_domain_roles.py +++ b/keystone/tests/protection/v3/test_domain_roles.py @@ -31,18 +31,20 @@ class _SystemUserDomainRoleTests(object): def test_user_can_list_domain_roles(self): PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id)) + unit.new_role_ref(domain_id=CONF.identity.default_domain_id), + ) with self.test_client() as c: r = c.get( '/v3/roles?domain_id=%s' % CONF.identity.default_domain_id, - headers=self.headers) + headers=self.headers, + ) self.assertEqual(1, len(r.json['roles'])) def test_user_can_get_a_domain_role(self): role = PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id) + 
unit.new_role_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: @@ -54,39 +56,47 @@ class _SystemReaderAndMemberDomainRoleTests(object): """Common default functionality for system readers and system members.""" def test_user_cannot_create_domain_roles(self): - create = {'role': unit.new_role_ref( - domain_id=CONF.identity.default_domain_id)} + create = { + 'role': unit.new_role_ref( + domain_id=CONF.identity.default_domain_id + ) + } with self.test_client() as c: c.post( - '/v3/roles', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_domain_roles(self): role = PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id) + unit.new_role_ref(domain_id=CONF.identity.default_domain_id), ) update = {'role': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/roles/%s' % role['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_domain_roles(self): role = PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id) + unit.new_role_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.delete( - '/v3/roles/%s' % role['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -96,67 +106,80 @@ class _DomainAndProjectUserDomainRoleTests(object): def test_user_cannot_list_domain_roles(self): PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id)) + 
unit.new_role_ref(domain_id=CONF.identity.default_domain_id), + ) with self.test_client() as c: c.get( - '/v3/roles', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_a_domain_role(self): role = PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id) + unit.new_role_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.get( - '/v3/roles/%s' % role['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_domain_roles(self): - create = {'role': unit.new_role_ref( - domain_id=CONF.identity.default_domain_id)} + create = { + 'role': unit.new_role_ref( + domain_id=CONF.identity.default_domain_id + ) + } with self.test_client() as c: c.post( - '/v3/roles', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_domain_roles(self): role = PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id) + unit.new_role_ref(domain_id=CONF.identity.default_domain_id), ) update = {'role': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/roles/%s' % role['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_domain_roles(self): role = PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id) + unit.new_role_ref(domain_id=CONF.identity.default_domain_id), ) with 
self.test_client() as c: c.delete( - '/v3/roles/%s' % role['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainRoleTests, - _SystemReaderAndMemberDomainRoleTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainRoleTests, + _SystemReaderAndMemberDomainRoleTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -167,16 +190,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -187,10 +209,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainRoleTests, - _SystemReaderAndMemberDomainRoleTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainRoleTests, + _SystemReaderAndMemberDomainRoleTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -201,16 +225,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = 
PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -221,9 +244,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainRoleTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainRoleTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -237,7 +262,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -248,8 +273,11 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} def test_user_can_create_roles(self): - create = {'role': unit.new_role_ref( - domain_id=CONF.identity.default_domain_id)} + create = { + 'role': unit.new_role_ref( + domain_id=CONF.identity.default_domain_id + ) + } with self.test_client() as c: c.post('/v3/roles', json=create, headers=self.headers) @@ -257,29 +285,33 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_can_update_roles(self): role = PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id) + unit.new_role_ref(domain_id=CONF.identity.default_domain_id), ) update = {'role': {'description': 
uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/roles/%s' % role['id'], json=update, headers=self.headers, + '/v3/roles/%s' % role['id'], + json=update, + headers=self.headers, ) def test_user_can_delete_roles(self): role = PROVIDERS.role_api.create_role( uuid.uuid4().hex, - unit.new_role_ref(domain_id=CONF.identity.default_domain_id) + unit.new_role_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.delete('/v3/roles/%s' % role['id'], headers=self.headers) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserDomainRoleTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserDomainRoleTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -294,14 +326,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -312,9 +345,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserDomainRoleTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserDomainRoleTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -326,7 +361,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, 
auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -338,9 +373,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserDomainRoleTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserDomainRoleTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -362,14 +398,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_domains.py b/keystone/tests/protection/v3/test_domains.py index 35a4bdd36a..b9525bddaa 100644 --- a/keystone/tests/protection/v3/test_domains.py +++ b/keystone/tests/protection/v3/test_domains.py @@ -53,8 +53,7 @@ class _SystemUserDomainTests(object): with self.test_client() as c: r = c.get( - '/v3/domains?name=%s' % domain_name, - headers=self.headers + '/v3/domains?name=%s' % domain_name, headers=self.headers ) self.assertEqual(1, len(r.json['domains'])) self.assertEqual(domain['id'], r.json['domains'][0]['id']) @@ -99,8 +98,10 @@ class _SystemMemberAndReaderDomainTests(object): with self.test_client() as c: c.post( - '/v3/domains', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/domains', + json=create, + 
headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_a_domain(self): @@ -111,9 +112,10 @@ class _SystemMemberAndReaderDomainTests(object): update = {'domain': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/domains/%s' % domain['id'], json=update, + '/v3/domains/%s' % domain['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_a_domain(self): @@ -123,8 +125,9 @@ class _SystemMemberAndReaderDomainTests(object): with self.test_client() as c: c.delete( - '/v3/domains/%s' % domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/domains/%s' % domain['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -155,7 +158,7 @@ class _DomainReaderDomainTests(object): # filtering for own domain should succeed r = c.get( '/v3/domains?name=%s' % self.domain['name'], - headers=self.headers + headers=self.headers, ) self.assertEqual(1, len(r.json['domains'])) self.assertNotIn( @@ -166,7 +169,7 @@ class _DomainReaderDomainTests(object): # filtering for the second domain should yield no results r = c.get( '/v3/domains?name=%s' % second_domain['name'], - headers=self.headers + headers=self.headers, ) self.assertEqual(0, len(r.json['domains'])) @@ -209,15 +212,17 @@ class _ProjectUserDomainTests(object): with self.test_client() as c: c.get( - '/v3/domains/%s' % domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/domains/%s' % domain['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_domains(self): with self.test_client() as c: c.get( - '/v3/domains', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/domains', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_domains_by_name(self): 
@@ -233,18 +238,20 @@ class _ProjectUserDomainTests(object): c.get( '/v3/domains?name=%s' % domain_name, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_filter_domains_by_enabled(self): with self.test_client() as c: c.get( - '/v3/domains?enabled=true', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/domains?enabled=true', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) c.get( - '/v3/domains?enabled=false', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/domains?enabled=false', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_a_domain(self): @@ -255,9 +262,10 @@ class _ProjectUserDomainTests(object): update = {'domain': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/domains/%s' % domain['id'], json=update, + '/v3/domains/%s' % domain['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_a_domain(self): @@ -265,8 +273,10 @@ class _ProjectUserDomainTests(object): with self.test_client() as c: c.post( - '/v3/domains', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/domains', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_a_domain(self): @@ -278,12 +288,15 @@ class _ProjectUserDomainTests(object): update = {'domain': {'enabled': False}} path = '/v3/domains/%s' % domain['id'] c.patch( - path, json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + 
expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_non_existant_domain_forbidden(self): @@ -292,14 +305,16 @@ class _ProjectUserDomainTests(object): c.get( '/v3/domains/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainTests, - _SystemMemberAndReaderDomainTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainTests, + _SystemMemberAndReaderDomainTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -318,8 +333,9 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, ) auth = self.build_authentication_request( - user_id=self.system_reader_id, password=system_reader['password'], - system=True + user_id=self.system_reader_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -330,10 +346,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainTests, - _SystemMemberAndReaderDomainTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainTests, + _SystemMemberAndReaderDomainTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -352,8 +370,9 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, ) auth = self.build_authentication_request( - user_id=self.system_member_id, password=system_member['password'], - system=True + user_id=self.system_member_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -364,9 +383,11 @@ class 
SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserDomainTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserDomainTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -378,7 +399,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.system_admin_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -396,17 +417,16 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, update = {'domain': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/domains/%s' % domain['id'], json=update, - headers=self.headers + '/v3/domains/%s' % domain['id'], + json=update, + headers=self.headers, ) def test_user_can_create_a_domain(self): create = {'domain': {'name': uuid.uuid4().hex}} with self.test_client() as c: - c.post( - '/v3/domains', json=create, headers=self.headers - ) + c.post('/v3/domains', json=create, headers=self.headers) def test_user_can_delete_a_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -420,9 +440,11 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, c.delete(path, headers=self.headers) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainReaderDomainTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainReaderDomainTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -435,17 +457,19 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, ) self.domain_id = self.domain['id'] domain_user = unit.new_user_ref(domain_id=self.domain_id) - self.domain_user_id = PROVIDERS.identity_api.create_user( - domain_user 
- )['id'] + self.domain_user_id = PROVIDERS.identity_api.create_user(domain_user)[ + 'id' + ] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.domain_user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.domain_user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.domain_user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.domain_user_id, + password=domain_user['password'], + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -456,9 +480,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ProjectUserDomainTests): +class ProjectReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ProjectUserDomainTests, +): def setUp(self): super(ProjectReaderTests, self).setUp() @@ -472,23 +498,24 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, self.domain_id = domain['id'] project_reader = unit.new_user_ref(domain_id=self.domain_id) - project_reader_id = PROVIDERS.identity_api.create_user( - project_reader - )['id'] + project_reader_id = PROVIDERS.identity_api.create_user(project_reader)[ + 'id' + ] project = unit.new_project_ref(domain_id=self.domain_id) project_id = PROVIDERS.resource_api.create_project( project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=project_reader_id, - project_id=project_id + self.bootstrapper.reader_role_id, + user_id=project_reader_id, + project_id=project_id, ) auth = self.build_authentication_request( user_id=project_reader_id, password=project_reader['password'], - project_id=project_id + project_id=project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -499,9 +526,11 @@ class 
ProjectReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ProjectUserDomainTests): +class ProjectMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ProjectUserDomainTests, +): def setUp(self): super(ProjectMemberTests, self).setUp() @@ -515,23 +544,24 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.domain_id = domain['id'] project_member = unit.new_user_ref(domain_id=self.domain_id) - project_member_id = PROVIDERS.identity_api.create_user( - project_member - )['id'] + project_member_id = PROVIDERS.identity_api.create_user(project_member)[ + 'id' + ] project = unit.new_project_ref(domain_id=self.domain_id) project_id = PROVIDERS.resource_api.create_project( project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=project_member_id, - project_id=project_id + self.bootstrapper.member_role_id, + user_id=project_member_id, + project_id=project_id, ) auth = self.build_authentication_request( user_id=project_member_id, password=project_member['password'], - project_id=project_id + project_id=project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -542,9 +572,11 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ProjectUserDomainTests): +class ProjectAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ProjectUserDomainTests, +): def setUp(self): super(ProjectAdminTests, self).setUp() @@ -565,23 +597,24 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, self.domain_id = domain['id'] project_admin = unit.new_user_ref(domain_id=self.domain_id) - project_admin_id = PROVIDERS.identity_api.create_user( - 
project_admin - )['id'] + project_admin_id = PROVIDERS.identity_api.create_user(project_admin)[ + 'id' + ] project = unit.new_project_ref(domain_id=self.domain_id) project_id = PROVIDERS.resource_api.create_project( project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=project_admin_id, - project_id=project_id + self.bootstrapper.admin_role_id, + user_id=project_admin_id, + project_id=project_id, ) auth = self.build_authentication_request( user_id=project_admin_id, password=project_admin['password'], - project_id=project_id + project_id=project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_ec2_credential.py b/keystone/tests/protection/v3/test_ec2_credential.py index 1f995d5a08..9b70f76bc0 100644 --- a/keystone/tests/protection/v3/test_ec2_credential.py +++ b/keystone/tests/protection/v3/test_ec2_credential.py @@ -34,22 +34,26 @@ class _UserEC2CredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=project['id'], ) with self.test_client() as c: - r = c.post('/v3/users/%s/credentials/OS-EC2' % self.user_id, - json={'tenant_id': project['id']}, headers=self.headers) + r = c.post( + '/v3/users/%s/credentials/OS-EC2' % self.user_id, + json={'tenant_id': project['id']}, + headers=self.headers, + ) credential_id = r.json['credential']['access'] path = '/v3/users/%s/credentials/OS-EC2/%s' % ( - self.user_id, credential_id) - r = c.get(path, headers=self.headers) - self.assertEqual( - self.user_id, r.json['credential']['user_id'] + self.user_id, + credential_id, ) + r = c.get(path, headers=self.headers) + self.assertEqual(self.user_id, r.json['credential']['user_id']) def 
test_user_can_list_their_ec2_credentials(self): project = unit.new_project_ref( @@ -57,20 +61,22 @@ class _UserEC2CredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=project['id'], ) with self.test_client() as c: - c.post('/v3/users/%s/credentials/OS-EC2' % self.user_id, - json={'tenant_id': project['id']}, headers=self.headers) + c.post( + '/v3/users/%s/credentials/OS-EC2' % self.user_id, + json={'tenant_id': project['id']}, + headers=self.headers, + ) path = '/v3/users/%s/credentials/OS-EC2' % self.user_id r = c.get(path, headers=self.headers) for credential in r.json['credentials']: - self.assertEqual( - self.user_id, credential['user_id'] - ) + self.assertEqual(self.user_id, credential['user_id']) def test_user_create_their_ec2_credentials(self): project = unit.new_project_ref( @@ -78,14 +84,18 @@ class _UserEC2CredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=project['id'], ) with self.test_client() as c: - c.post('/v3/users/%s/credentials/OS-EC2' % self.user_id, - json={'tenant_id': project['id']}, headers=self.headers, - expected_status_code=http.client.CREATED) + c.post( + '/v3/users/%s/credentials/OS-EC2' % self.user_id, + json={'tenant_id': project['id']}, + headers=self.headers, + expected_status_code=http.client.CREATED, + ) def test_user_delete_their_ec2_credentials(self): project = unit.new_project_ref( @@ -93,18 +103,24 @@ class _UserEC2CredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) 
PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=project['id'], ) with self.test_client() as c: - r = c.post('/v3/users/%s/credentials/OS-EC2' % self.user_id, - json={'tenant_id': project['id']}, headers=self.headers) + r = c.post( + '/v3/users/%s/credentials/OS-EC2' % self.user_id, + json={'tenant_id': project['id']}, + headers=self.headers, + ) credential_id = r.json['credential']['access'] - c.delete('/v3/users/%s/credentials/OS-EC2/%s' % ( - self.user_id, credential_id), - headers=self.headers) + c.delete( + '/v3/users/%s/credentials/OS-EC2/%s' + % (self.user_id, credential_id), + headers=self.headers, + ) def test_user_cannot_create_ec2_credentials_for_others(self): user = PROVIDERS.identity_api.create_user( @@ -115,14 +131,18 @@ class _UserEC2CredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: - c.post('/v3/users/%s/credentials/OS-EC2' % user['id'], - json={'tenant_id': project['id']}, headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.post( + '/v3/users/%s/credentials/OS-EC2' % user['id'], + json={'tenant_id': project['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_delete_ec2_credentials_for_others(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -133,26 +153,33 @@ class _UserEC2CredentialTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + 
self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: r = c.post('/v3/auth/tokens', json=user_auth) token_id = r.headers['X-Subject-Token'] headers = {'X-Auth-Token': token_id} - r = c.post('/v3/users/%s/credentials/OS-EC2' % user['id'], - json={'tenant_id': project['id']}, headers=headers) + r = c.post( + '/v3/users/%s/credentials/OS-EC2' % user['id'], + json={'tenant_id': project['id']}, + headers=headers, + ) credential_id = r.json['credential']['access'] - c.delete('/v3/users/%s/credentials/OS-EC2/%s' % ( - self.user_id, credential_id), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/users/%s/credentials/OS-EC2/%s' + % (self.user_id, credential_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) class _SystemUserTests(object): @@ -166,26 +193,34 @@ class _SystemUserTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: r = c.post('/v3/auth/tokens', json=user_auth) token_id = r.headers['X-Subject-Token'] headers = {'X-Auth-Token': token_id} - r = c.post('/v3/users/%s/credentials/OS-EC2' % user['id'], - json={'tenant_id': project['id']}, headers=headers) + r = c.post( + '/v3/users/%s/credentials/OS-EC2' % user['id'], + json={'tenant_id': project['id']}, + headers=headers, + ) 
credential_id = r.json['credential']['access'] path = '/v3/users/%s/credentials/OS-EC2/%s' % ( - self.user_id, credential_id) - c.get(path, headers=self.headers, - expected_status_code=http.client.OK) + self.user_id, + credential_id, + ) + c.get( + path, headers=self.headers, expected_status_code=http.client.OK + ) class _SystemReaderAndMemberTests(object): @@ -199,30 +234,37 @@ class _SystemReaderAndMemberTests(object): ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: r = c.post('/v3/auth/tokens', json=user_auth) token_id = r.headers['X-Subject-Token'] headers = {'X-Auth-Token': token_id} - c.post('/v3/users/%s/credentials/OS-EC2' % user['id'], - json={'tenant_id': project['id']}, headers=headers) + c.post( + '/v3/users/%s/credentials/OS-EC2' % user['id'], + json={'tenant_id': project['id']}, + headers=headers, + ) path = '/v3/users/%s/credentials/OS-EC2' % self.user_id r = c.get(path, headers=self.headers) self.assertEqual([], r.json['credentials']) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTests, - _SystemReaderAndMemberTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, + _SystemReaderAndMemberTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -233,16 +275,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - 
system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -253,10 +294,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTests, - _SystemReaderAndMemberTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, + _SystemReaderAndMemberTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -267,16 +310,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -287,9 +329,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, +): def setUp(self): 
super(SystemAdminTests, self).setUp() @@ -303,7 +347,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -322,20 +366,25 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: r = c.post('/v3/auth/tokens', json=user_auth) token_id = r.headers['X-Subject-Token'] headers = {'X-Auth-Token': token_id} - c.post('/v3/users/%s/credentials/OS-EC2' % user['id'], - json={'tenant_id': project['id']}, headers=headers) + c.post( + '/v3/users/%s/credentials/OS-EC2' % user['id'], + json={'tenant_id': project['id']}, + headers=headers, + ) path = '/v3/users/%s/credentials/OS-EC2' % self.user_id r = c.get(path, headers=self.headers) @@ -350,13 +399,17 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: - c.post('/v3/users/%s/credentials/OS-EC2' % user['id'], - json={'tenant_id': project['id']}, headers=self.headers) + c.post( + '/v3/users/%s/credentials/OS-EC2' % user['id'], + json={'tenant_id': project['id']}, + headers=self.headers, + ) 
def test_user_can_delete_ec2_credentials_for_others(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -367,31 +420,40 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) user_auth = self.build_authentication_request( - user_id=user['id'], password=user_password, - project_id=project['id'] + user_id=user['id'], + password=user_password, + project_id=project['id'], ) with self.test_client() as c: r = c.post('/v3/auth/tokens', json=user_auth) token_id = r.headers['X-Subject-Token'] headers = {'X-Auth-Token': token_id} - r = c.post('/v3/users/%s/credentials/OS-EC2' % user['id'], - json={'tenant_id': project['id']}, headers=headers) + r = c.post( + '/v3/users/%s/credentials/OS-EC2' % user['id'], + json={'tenant_id': project['id']}, + headers=headers, + ) credential_id = r.json['credential']['access'] - c.delete('/v3/users/%s/credentials/OS-EC2/%s' % ( - self.user_id, credential_id), - headers=self.headers) + c.delete( + '/v3/users/%s/credentials/OS-EC2/%s' + % (self.user_id, credential_id), + headers=self.headers, + ) -class ProjectAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserEC2CredentialTests, - _SystemReaderAndMemberTests): +class ProjectAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserEC2CredentialTests, + _SystemReaderAndMemberTests, +): def _override_policy(self): # TODO(cmurphy): Remove this once the deprecated policies in @@ -411,7 +473,7 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, 'identity:ec2_list_credentials': reader_or_owner, 'identity:ec2_create_credential': admin_or_cred_owner, 'identity:ec2_update_credential': admin_or_cred_owner, - 
'identity:ec2_delete_credential': admin_or_cred_owner + 'identity:ec2_delete_credential': admin_or_cred_owner, } f.write(jsonutils.dumps(overridden_policies)) @@ -435,7 +497,7 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_endpoint_group.py b/keystone/tests/protection/v3/test_endpoint_group.py index 7ae07cabe0..fb82e4caa8 100644 --- a/keystone/tests/protection/v3/test_endpoint_group.py +++ b/keystone/tests/protection/v3/test_endpoint_group.py @@ -30,7 +30,8 @@ class _SystemUserEndpointGroupsTests(object): def test_user_can_list_endpoint_groups(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) @@ -45,30 +46,37 @@ class _SystemUserEndpointGroupsTests(object): def test_user_can_get_an_endpoint_group(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], - headers=self.headers) + c.get( + '/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], + headers=self.headers, + ) def test_user_can_list_projects_associated_with_endpoint_groups(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = 
unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group['id'], project['id']) + endpoint_group['id'], project['id'] + ) with self.test_client() as c: - r = c.get('/v3/OS-EP-FILTER/endpoint_groups/%s/projects' - % endpoint_group['id'], headers=self.headers) + r = c.get( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects' + % endpoint_group['id'], + headers=self.headers, + ) projects = [] for project in r.json['projects']: projects.append(project['id']) @@ -83,14 +91,17 @@ class _SystemUserEndpointGroupsTests(object): endpoint['id'], endpoint ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - r = c.get('/v3/OS-EP-FILTER/endpoint_groups/%s/endpoints' - % endpoint_group['id'], - headers=self.headers) + r = c.get( + '/v3/OS-EP-FILTER/endpoint_groups/%s/endpoints' + % endpoint_group['id'], + headers=self.headers, + ) endpoints = [] for endpoint in r.json['endpoints']: endpoints.append(endpoint['id']) @@ -98,39 +109,44 @@ class _SystemUserEndpointGroupsTests(object): def test_user_can_get_endpoints_associated_with_endpoint_groups(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group['id'], 
project['id']) + endpoint_group['id'], project['id'] + ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' - % (endpoint_group['id'], project['id']), - headers=self.headers) + c.get( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' + % (endpoint_group['id'], project['id']), + headers=self.headers, + ) def test_user_can_list_endpoint_groups_with_their_projects(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group['id'], project['id']) + endpoint_group['id'], project['id'] + ) with self.test_client() as c: - r = c.get('/v3/OS-EP-FILTER/projects/%s/endpoint_groups' - % project['id'], - headers=self.headers) + r = c.get( + '/v3/OS-EP-FILTER/projects/%s/endpoint_groups' % project['id'], + headers=self.headers, + ) endpoint_groups = [] for endpoint_group in r.json['endpoint_groups']: endpoint_groups.append(endpoint_group['id']) @@ -145,20 +161,22 @@ class _SystemReaderAndMemberUserEndpointGroupsTests(object): 'id': uuid.uuid4().hex, 'description': uuid.uuid4().hex, 'filters': {'interface': 'public'}, - 'name': uuid.uuid4().hex + 'name': uuid.uuid4().hex, } } with self.test_client() as c: c.post( - '/v3/OS-EP-FILTER/endpoint_groups', json=create, + '/v3/OS-EP-FILTER/endpoint_groups', + json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_endpoint_groups(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 
'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) @@ -170,12 +188,13 @@ class _SystemReaderAndMemberUserEndpointGroupsTests(object): '/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_endpoint_groups(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) @@ -184,88 +203,100 @@ class _SystemReaderAndMemberUserEndpointGroupsTests(object): c.delete( '/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_add_endpoint_group_to_project(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.put('/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' - % (endpoint_group['id'], project['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN - ) + c.put( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' + % (endpoint_group['id'], project['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_cannot_remove_endpoint_group_from_project(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - 
domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.delete('/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' - % (endpoint_group['id'], project['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN - ) + c.delete( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' + % (endpoint_group['id'], project['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) class _DomainAndProjectUserEndpointGroupTests(object): def test_user_cannot_list_endpoint_groups(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/endpoint_groups', headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/endpoint_groups', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_an_endpoint_group(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_list_projects_associated_with_endpoint_groups(self): project = 
PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group['id'], project['id']) + endpoint_group['id'], project['id'] + ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/endpoint_groups/%s/projects' - % endpoint_group['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects' + % endpoint_group['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_list_endpoints_associated_with_endpoint_groups(self): service = PROVIDERS.catalog_api.create_service( @@ -276,53 +307,61 @@ class _DomainAndProjectUserEndpointGroupTests(object): endpoint['id'], endpoint ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/endpoint_groups/%s/endpoints' - % endpoint_group['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/endpoint_groups/%s/endpoints' + % endpoint_group['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_endpoints_associated_with_endpoint_groups(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), 
) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group['id'], project['id']) + endpoint_group['id'], project['id'] + ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' - % (endpoint_group['id'], project['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' + % (endpoint_group['id'], project['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_list_endpoint_groups_with_their_projects(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group['id'], project['id']) + endpoint_group['id'], project['id'] + ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/projects/%s/endpoint_groups' - % project['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/projects/%s/endpoint_groups' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_create_endpoint_groups(self): create = { @@ -330,20 +369,22 @@ class _DomainAndProjectUserEndpointGroupTests(object): 'id': uuid.uuid4().hex, 'description': uuid.uuid4().hex, 'filters': {'interface': 'public'}, - 'name': uuid.uuid4().hex + 'name': uuid.uuid4().hex, } } 
with self.test_client() as c: c.post( - '/v3/OS-EP-FILTER/endpoint_groups', json=create, + '/v3/OS-EP-FILTER/endpoint_groups', + json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_endpoint_groups(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) @@ -355,12 +396,13 @@ class _DomainAndProjectUserEndpointGroupTests(object): '/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_endpoint_groups(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) @@ -369,50 +411,54 @@ class _DomainAndProjectUserEndpointGroupTests(object): c.delete( '/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_add_endpoint_group_to_project(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.put('/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' - % (endpoint_group['id'], project['id']), - headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN - ) + c.put( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' + % (endpoint_group['id'], project['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_cannot_remove_endpoint_group_from_project(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.delete('/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' - % (endpoint_group['id'], project['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN - ) + c.delete( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' + % (endpoint_group['id'], project['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserEndpointGroupsTests, - _SystemReaderAndMemberUserEndpointGroupsTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserEndpointGroupsTests, + _SystemReaderAndMemberUserEndpointGroupsTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -423,16 +469,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - 
user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -443,10 +488,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserEndpointGroupsTests, - _SystemReaderAndMemberUserEndpointGroupsTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserEndpointGroupsTests, + _SystemReaderAndMemberUserEndpointGroupsTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -457,16 +504,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -477,9 +523,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserEndpointGroupsTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserEndpointGroupsTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -493,7 +541,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( 
user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -509,18 +557,21 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 'id': uuid.uuid4().hex, 'description': uuid.uuid4().hex, 'filters': {'interface': 'public'}, - 'name': uuid.uuid4().hex + 'name': uuid.uuid4().hex, } } with self.test_client() as c: c.post( - '/v3/OS-EP-FILTER/endpoint_groups', json=create, - headers=self.headers) + '/v3/OS-EP-FILTER/endpoint_groups', + json=create, + headers=self.headers, + ) def test_user_can_update_endpoint_group(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) @@ -531,11 +582,13 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, c.patch( '/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], json=update, - headers=self.headers) + headers=self.headers, + ) def test_user_can_delete_endpoint_group(self): endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) @@ -543,49 +596,54 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( '/v3/OS-EP-FILTER/endpoint_groups/%s' % endpoint_group['id'], - headers=self.headers + headers=self.headers, ) def test_user_add_endpoint_group_to_project(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = 
PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) with self.test_client() as c: - c.put('/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' - % (endpoint_group['id'], project['id']), - headers=self.headers - ) + c.put( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' + % (endpoint_group['id'], project['id']), + headers=self.headers, + ) def test_remove_endpoint_group_from_project(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) endpoint_group = unit.new_endpoint_group_ref( - filters={'interface': 'public'}) + filters={'interface': 'public'} + ) endpoint_group = PROVIDERS.catalog_api.create_endpoint_group( endpoint_group['id'], endpoint_group ) PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group['id'], project['id']) + endpoint_group['id'], project['id'] + ) with self.test_client() as c: - c.delete('/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' - % (endpoint_group['id'], project['id']), - headers=self.headers - ) + c.delete( + '/v3/OS-EP-FILTER/endpoint_groups/%s/projects/%s' + % (endpoint_group['id'], project['id']), + headers=self.headers, + ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserEndpointGroupTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserEndpointGroupTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -600,14 +658,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + 
user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -618,9 +677,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserEndpointGroupTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserEndpointGroupTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -632,7 +693,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -644,9 +705,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserEndpointGroupTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserEndpointGroupTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -668,14 +730,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers 
diff --git a/keystone/tests/protection/v3/test_endpoints.py b/keystone/tests/protection/v3/test_endpoints.py index 7de74da2b6..a32409c28c 100644 --- a/keystone/tests/protection/v3/test_endpoints.py +++ b/keystone/tests/protection/v3/test_endpoints.py @@ -67,14 +67,16 @@ class _SystemReaderAndMemberUserEndpointTests(object): 'endpoint': { 'interface': 'public', 'service_id': uuid.uuid4().hex, - 'url': 'https://' + uuid.uuid4().hex + '.com' + 'url': 'https://' + uuid.uuid4().hex + '.com', } } with self.test_client() as c: c.post( - '/v3/endpoints', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/endpoints', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_endpoints(self): @@ -90,9 +92,10 @@ class _SystemReaderAndMemberUserEndpointTests(object): with self.test_client() as c: c.patch( - '/v3/endpoints/%s' % endpoint['id'], json=update, + '/v3/endpoints/%s' % endpoint['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_endpoints(self): @@ -106,8 +109,9 @@ class _SystemReaderAndMemberUserEndpointTests(object): with self.test_client() as c: c.delete( - '/v3/endpoints/%s' % endpoint['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/endpoints/%s' % endpoint['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -118,14 +122,16 @@ class _DomainAndProjectUserEndpointTests(object): 'endpoint': { 'interface': 'public', 'service_id': uuid.uuid4().hex, - 'url': 'https://' + uuid.uuid4().hex + '.com' + 'url': 'https://' + uuid.uuid4().hex + '.com', } } with self.test_client() as c: c.post( - '/v3/endpoints', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/endpoints', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def 
test_user_cannot_list_endpoints(self): @@ -142,8 +148,9 @@ class _DomainAndProjectUserEndpointTests(object): with self.test_client() as c: c.get( - '/v3/endpoints', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/endpoints', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_an_endpoint(self): @@ -157,8 +164,9 @@ class _DomainAndProjectUserEndpointTests(object): with self.test_client() as c: c.get( - '/v3/endpoints/%s' % endpoint['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/endpoints/%s' % endpoint['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_endpoints(self): @@ -174,9 +182,10 @@ class _DomainAndProjectUserEndpointTests(object): with self.test_client() as c: c.patch( - '/v3/endpoints/%s' % endpoint['id'], json=update, + '/v3/endpoints/%s' % endpoint['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_endpoints(self): @@ -190,15 +199,18 @@ class _DomainAndProjectUserEndpointTests(object): with self.test_client() as c: c.delete( - '/v3/endpoints/%s' % endpoint['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/endpoints/%s' % endpoint['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserEndpointTests, - _SystemReaderAndMemberUserEndpointTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserEndpointTests, + _SystemReaderAndMemberUserEndpointTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -209,16 +221,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - 
self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -229,10 +240,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserEndpointTests, - _SystemReaderAndMemberUserEndpointTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserEndpointTests, + _SystemReaderAndMemberUserEndpointTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -243,16 +256,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -263,9 +275,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserEndpointTests): +class SystemAdminTests( + 
base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserEndpointTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -279,7 +293,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -297,7 +311,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 'endpoint': { 'interface': 'public', 'service_id': service['id'], - 'url': 'https://' + uuid.uuid4().hex + '.com' + 'url': 'https://' + uuid.uuid4().hex + '.com', } } @@ -317,8 +331,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/endpoints/%s' % endpoint['id'], json=update, - headers=self.headers + '/v3/endpoints/%s' % endpoint['id'], + json=update, + headers=self.headers, ) def test_user_can_delete_endpoints(self): @@ -332,13 +347,16 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - '/v3/endpoints/%s' % endpoint['id'], headers=self.headers, + '/v3/endpoints/%s' % endpoint['id'], + headers=self.headers, ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserEndpointTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserEndpointTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -353,14 +371,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = 
self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -371,9 +390,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserEndpointTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserEndpointTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -385,7 +406,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -397,9 +418,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserEndpointTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserEndpointTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -421,14 +443,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_grants.py 
b/keystone/tests/protection/v3/test_grants.py index bb74b09014..d841b327b9 100644 --- a/keystone/tests/protection/v3/test_grants.py +++ b/keystone/tests/protection/v3/test_grants.py @@ -36,20 +36,20 @@ class _SystemUserGrantTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: r = c.get( '/v3/projects/%s/users/%s/roles' % (project['id'], user['id']), - headers=self.headers + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) @@ -63,14 +63,15 @@ class _SystemUserGrantTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) with self.test_client() as c: r = c.get( '/v3/domains/%s/users/%s/roles' % (domain['id'], user['id']), - headers=self.headers + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) @@ -80,21 +81,21 @@ class _SystemUserGrantTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: r = c.get( - '/v3/projects/%s/groups/%s/roles' % ( - project['id'], group['id']), - headers=self.headers + 
'/v3/projects/%s/groups/%s/roles' + % (project['id'], group['id']), + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) @@ -108,14 +109,15 @@ class _SystemUserGrantTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain['id'], ) with self.test_client() as c: r = c.get( '/v3/domains/%s/groups/%s/roles' % (domain['id'], group['id']), - headers=self.headers + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) @@ -125,23 +127,26 @@ class _SystemUserGrantTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_can_check_grant_for_user_on_domain(self): @@ -154,17 +159,17 @@ class _SystemUserGrantTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.get( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain['id'], user['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain['id'], 
user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_can_check_grant_for_group_on_project(self): @@ -173,25 +178,26 @@ class _SystemUserGrantTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_can_check_grant_for_group_on_domain(self): @@ -204,17 +210,21 @@ class _SystemUserGrantTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.get( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain['id'], group['id'], self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + domain['id'], + group['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) @@ -226,18 +236,20 @@ class _SystemMemberAndReaderGrantTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + 
unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.put( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_user_on_domain(self): @@ -251,11 +263,10 @@ class _SystemMemberAndReaderGrantTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain['id'], user['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain['id'], user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_group_on_project(self): @@ -264,20 +275,20 @@ class _SystemMemberAndReaderGrantTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.put( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_group_on_domain(self): @@ -291,11 +302,14 @@ class _SystemMemberAndReaderGrantTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain['id'], group['id'], self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + domain['id'], + group['id'], + 
self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_user_on_project(self): @@ -304,23 +318,26 @@ class _SystemMemberAndReaderGrantTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_user_on_domain(self): @@ -333,17 +350,17 @@ class _SystemMemberAndReaderGrantTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.delete( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain['id'], user['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain['id'], user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_group_on_project(self): @@ -352,25 +369,26 @@ class _SystemMemberAndReaderGrantTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, 
unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_group_on_domain(self): @@ -383,17 +401,21 @@ class _SystemMemberAndReaderGrantTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.delete( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain['id'], group['id'], self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + domain['id'], + group['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -409,14 +431,15 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: r = c.get( '/v3/projects/%s/users/%s/roles' % (project['id'], user['id']), - headers=self.headers + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) @@ -426,14 +449,15 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - 
self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=self.domain_id, ) with self.test_client() as c: r = c.get( '/v3/domains/%s/users/%s/roles' % (self.domain_id, user['id']), - headers=self.headers + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) @@ -447,15 +471,16 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: r = c.get( - '/v3/projects/%s/groups/%s/roles' % ( - project['id'], group['id']), - headers=self.headers + '/v3/projects/%s/groups/%s/roles' + % (project['id'], group['id']), + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) @@ -465,15 +490,16 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=self.domain_id, ) with self.test_client() as c: r = c.get( - '/v3/domains/%s/groups/%s/roles' % ( - self.domain_id, group['id'] - ), headers=self.headers + '/v3/domains/%s/groups/%s/roles' + % (self.domain_id, group['id']), + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) @@ -483,23 +509,25 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=self.domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/users/%s/roles/%s' % ( - 
project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_can_check_grant_for_user_on_domain(self): @@ -508,18 +536,21 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=self.domain_id, ) with self.test_client() as c: c.get( - '/v3/domains/%s/users/%s/roles/%s' % ( - self.domain_id, user['id'], - self.bootstrapper.reader_role_id + '/v3/domains/%s/users/%s/roles/%s' + % ( + self.domain_id, + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_can_check_grant_for_group_on_project(self): @@ -532,19 +563,21 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_can_check_grant_for_group_on_domain(self): @@ -553,21 +586,26 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=self.domain_id, ) with self.test_client() as 
c: c.get( - '/v3/domains/%s/groups/%s/roles/%s' % ( - self.domain_id, group['id'], - self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + self.domain_id, + group['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) - def test_cannot_list_grants_for_user_other_domain_on_project_own_domain(self): # noqa: E501 + def test_cannot_list_grants_for_user_other_domain_on_project_own_domain( + self, + ): # noqa: E501 user_domain_id = CONF.identity.default_domain_id project_domain_id = self.domain_id @@ -580,18 +618,21 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( '/v3/projects/%s/users/%s/roles' % (project['id'], user['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_list_grants_for_user_own_domain_on_project_other_domain(self): # noqa: E501 + def test_cannot_list_grants_for_user_own_domain_on_project_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id @@ -600,20 +641,20 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=project_domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( '/v3/projects/%s/users/%s/roles' % (project['id'], user['id']), headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_list_grants_for_user_own_domain_on_other_domain(self): @@ -625,15 +666,16 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain_id, ) with self.test_client() as c: c.get( '/v3/domains/%s/users/%s/roles' % (domain_id, user['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_list_grants_for_user_other_domain_on_own_domain(self): @@ -645,18 +687,21 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain_id, ) with self.test_client() as c: c.get( '/v3/domains/%s/users/%s/roles' % (domain_id, user['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_list_grants_for_group_other_domain_on_project_own_domain(self): # noqa: E501 + def test_cannot_list_grants_for_group_other_domain_on_project_own_domain( + self, + ): # noqa: E501 group_domain_id = CONF.identity.default_domain_id project_domain_id = self.domain_id @@ -669,19 +714,22 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/groups/%s/roles' % ( - project['id'], group['id']), + '/v3/projects/%s/groups/%s/roles' + % (project['id'], group['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + 
expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_list_grants_for_group_own_domain_on_project_other_domain(self): # noqa: E501 + def test_cannot_list_grants_for_group_own_domain_on_project_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id @@ -690,21 +738,21 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=project_domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/groups/%s/roles' % ( - project['id'], group['id']), + '/v3/projects/%s/groups/%s/roles' + % (project['id'], group['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_list_grants_for_group_own_domain_on_other_domain(self): @@ -716,16 +764,16 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain_id, ) with self.test_client() as c: c.get( - '/v3/domains/%s/groups/%s/roles' % ( - domain_id, group['id']), + '/v3/domains/%s/groups/%s/roles' % (domain_id, group['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_list_grants_for_group_other_domain_on_own_domain(self): @@ -737,19 +785,21 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + 
group_id=group['id'], + domain_id=domain_id, ) with self.test_client() as c: c.get( - '/v3/domains/%s/groups/%s/roles' % ( - domain_id, group['id']), + '/v3/domains/%s/groups/%s/roles' % (domain_id, group['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_check_grant_for_user_other_domain_on_project_own_domain(self): # noqa: E501 + def test_cannot_check_grant_for_user_other_domain_on_project_own_domain( + self, + ): # noqa: E501 user_domain_id = CONF.identity.default_domain_id project_domain_id = self.domain_id @@ -762,20 +812,26 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], - self.bootstrapper.reader_role_id), + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_check_grant_for_user_own_domain_on_project_other_domain(self): # noqa: E501 + def test_cannot_check_grant_for_user_own_domain_on_project_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id @@ -784,39 +840,44 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=project_domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with 
self.test_client() as c: c.get( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], - self.bootstrapper.reader_role_id), + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_check_grant_for_user_own_domain_on_project_own_domain_with_role_other_domain(self): # noqa: E501 + def test_cannot_check_grant_for_user_own_domain_on_project_own_domain_with_role_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id project_domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) user = PROVIDERS.identity_api.create_user( unit.new_user_ref(domain_id=user_domain_id) ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=project_domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) # NOTE(cmurphy) the grant for a domain-specific role cannot be created @@ -826,11 +887,10 @@ class _DomainUserTests(object): with self.test_client() as c: c.get( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], - role['id']), + '/v3/projects/%s/users/%s/roles/%s' + % (project['id'], user['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_check_grant_for_user_own_domain_on_other_domain(self): @@ -842,18 +902,17 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain_id, ) with self.test_client() as c: c.get( - 
'/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], - self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_check_grant_for_user_other_domain_on_own_domain(self): @@ -865,28 +924,29 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain_id, ) with self.test_client() as c: c.get( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], - self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_check_grant_for_user_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501 + def test_cannot_check_grant_for_user_own_domain_on_own_domain_with_role_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, - unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) user = PROVIDERS.identity_api.create_user( unit.new_user_ref(domain_id=user_domain_id) @@ -899,15 +959,15 @@ class _DomainUserTests(object): with self.test_client() as c: c.get( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], - role['id'] - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def 
test_cannot_check_grant_for_group_other_domain_on_project_own_domain(self): # noqa: E501 + def test_cannot_check_grant_for_group_other_domain_on_project_own_domain( + self, + ): # noqa: E501 group_domain_id = CONF.identity.default_domain_id project_domain_id = self.domain_id @@ -920,20 +980,26 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/groups/%s/roles/%s' % ( - project['id'], group['id'], - self.bootstrapper.reader_role_id), + '/v3/projects/%s/groups/%s/roles/%s' + % ( + project['id'], + group['id'], + self.bootstrapper.reader_role_id, + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_check_grant_for_group_own_domain_on_project_other_domain(self): # noqa: E501 + def test_cannot_check_grant_for_group_own_domain_on_project_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id @@ -946,27 +1012,33 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/projects/%s/groups/%s/roles/%s' % ( - project['id'], group['id'], - self.bootstrapper.reader_role_id), + '/v3/projects/%s/groups/%s/roles/%s' + % ( + project['id'], + group['id'], + self.bootstrapper.reader_role_id, + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_check_grant_for_group_own_domain_on_project_own_domain_with_role_other_domain(self): # noqa: E501 + def 
test_cannot_check_grant_for_group_own_domain_on_project_own_domain_with_role_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, - unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) group = PROVIDERS.identity_api.create_group( unit.new_group_ref(domain_id=group_domain_id) @@ -983,11 +1055,10 @@ class _DomainUserTests(object): with self.test_client() as c: c.get( - '/v3/projects/%s/groups/%s/roles/%s' % ( - project['id'], group['id'], - role['id']), + '/v3/projects/%s/groups/%s/roles/%s' + % (project['id'], group['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_check_grant_for_group_own_domain_on_other_domain(self): @@ -999,17 +1070,17 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain_id, ) with self.test_client() as c: c.get( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain_id, group['id'], - self.bootstrapper.reader_role_id), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_check_grant_for_group_other_domain_on_own_domain(self): @@ -1021,26 +1092,29 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain_id, ) with self.test_client() as c: c.get( - '/v3/domains/%s/groups/%s/roles/%s' % 
( - domain_id, group['id'], - self.bootstrapper.reader_role_id), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_check_grant_for_group_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501 + def test_cannot_check_grant_for_group_own_domain_on_own_domain_with_role_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) group = PROVIDERS.identity_api.create_group( unit.new_group_ref(domain_id=group_domain_id) @@ -1053,14 +1127,15 @@ class _DomainUserTests(object): with self.test_client() as c: c.get( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain_id, group['id'], - role['id']), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_create_grant_for_user_other_domain_on_project_own_domain(self): # noqa: E501 + def test_cannot_create_grant_for_user_other_domain_on_project_own_domain( + self, + ): # noqa: E501 user_domain_id = CONF.identity.default_domain_id project_domain_id = self.domain_id @@ -1069,21 +1144,24 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) with self.test_client() as c: c.put( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], 
+ self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_create_grant_for_user_own_domain_on_project_other_domain(self): # noqa: E501 + def test_cannot_create_grant_for_user_own_domain_on_project_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id @@ -1092,44 +1170,45 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) with self.test_client() as c: c.put( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_create_grant_for_user_own_domain_on_project_own_domain_with_role_other_domain(self): # noqa: E501 + def test_cannot_create_grant_for_user_own_domain_on_project_own_domain_with_role_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id project_domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) user = PROVIDERS.identity_api.create_user( unit.new_user_ref(domain_id=user_domain_id) ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) with self.test_client() as c: c.put( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], role['id'] - ), + 
'/v3/projects/%s/users/%s/roles/%s' + % (project['id'], user['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_user_other_domain_on_own_domain(self): @@ -1142,11 +1221,10 @@ class _DomainUserTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_user_own_domain_on_other_domain(self): @@ -1159,20 +1237,22 @@ class _DomainUserTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_create_grant_for_user_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501 + def test_cannot_create_grant_for_user_own_domain_on_own_domain_with_role_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) user = PROVIDERS.identity_api.create_user( unit.new_user_ref(domain_id=user_domain_id) @@ -1180,14 +1260,15 @@ class _DomainUserTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], role['id'] - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], 
role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_create_grant_for_group_other_domain_on_project_own_domain(self): # noqa: E501 + def test_cannot_create_grant_for_group_other_domain_on_project_own_domain( + self, + ): # noqa: E501 group_domain_id = CONF.identity.default_domain_id project_domain_id = self.domain_id @@ -1196,23 +1277,24 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) with self.test_client() as c: c.put( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_create_grant_for_group_own_domain_on_project_other_domain(self): # noqa: E501 + def test_cannot_create_grant_for_group_own_domain_on_project_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id @@ -1221,49 +1303,46 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) with self.test_client() as c: c.put( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_create_grant_for_group_own_domain_on_project_own_domain_with_role_other_domain(self): # 
noqa: E501 + def test_cannot_create_grant_for_group_own_domain_on_project_own_domain_with_role_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id project_domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) group = PROVIDERS.identity_api.create_group( unit.new_group_ref(domain_id=group_domain_id) ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) with self.test_client() as c: c.put( - '/v3/projects/%s/groups/%s/roles/%s' % ( - project['id'], - group['id'], - role['id'] - ), + '/v3/projects/%s/groups/%s/roles/%s' + % (project['id'], group['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_group_other_domain_on_own_domain(self): @@ -1276,11 +1355,10 @@ class _DomainUserTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain_id, group['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_group_own_domain_on_other_domain(self): @@ -1293,20 +1371,22 @@ class _DomainUserTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain_id, group['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + 
expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_create_grant_for_group_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501 + def test_cannot_create_grant_for_group_own_domain_on_own_domain_with_role_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) group = PROVIDERS.identity_api.create_group( unit.new_group_ref(domain_id=group_domain_id) @@ -1314,14 +1394,15 @@ class _DomainUserTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain_id, group['id'], role['id'] - ), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_revoke_grant_from_user_other_domain_on_project_own_domain(self): # noqa: E501 + def test_cannot_revoke_grant_from_user_other_domain_on_project_own_domain( + self, + ): # noqa: E501 user_domain_id = CONF.identity.default_domain_id project_domain_id = self.domain_id @@ -1330,26 +1411,30 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + 
self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_revoke_grant_from_user_own_domain_on_project_other_domain(self): # noqa: E501 + def test_cannot_revoke_grant_from_user_own_domain_on_project_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id @@ -1358,23 +1443,25 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_user_other_domain_on_own_domain(self): @@ -1386,17 +1473,17 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain_id, ) with self.test_client() as c: c.delete( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def 
test_cannot_revoke_grant_from_user_own_domain_on_other_domain(self): @@ -1408,46 +1495,49 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain_id, ) with self.test_client() as c: c.delete( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_revoke_grant_from_user_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501 + def test_cannot_revoke_grant_from_user_own_domain_on_own_domain_with_role_other_domain( + self, + ): # noqa: E501 user_domain_id = self.domain_id domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) user = PROVIDERS.identity_api.create_user( unit.new_user_ref(domain_id=user_domain_id) ) PROVIDERS.assignment_api.create_grant( - role['id'], user_id=user['id'], - domain_id=domain_id + role['id'], user_id=user['id'], domain_id=domain_id ) with self.test_client() as c: c.delete( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain_id, user['id'], role['id'] - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain_id, user['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_revoke_grant_from_group_other_domain_on_project_own_domain(self): # noqa: E501 + def test_cannot_revoke_grant_from_group_other_domain_on_project_own_domain( + self, + ): # noqa: E501 group_domain_id = CONF.identity.default_domain_id 
project_domain_id = self.domain_id @@ -1456,28 +1546,30 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_revoke_grant_from_group_own_domain_on_project_other_domain(self): # noqa: E501 + def test_cannot_revoke_grant_from_group_own_domain_on_project_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id project_domain_id = CONF.identity.default_domain_id @@ -1486,25 +1578,25 @@ class _DomainUserTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=project_domain_id - ) + uuid.uuid4().hex, unit.new_project_ref(domain_id=project_domain_id) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def 
test_cannot_revoke_grant_from_group_other_domain_on_own_domain(self): @@ -1516,17 +1608,17 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain_id, ) with self.test_client() as c: c.delete( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain_id, group['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_group_own_domain_on_other_domain(self): @@ -1538,51 +1630,53 @@ class _DomainUserTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain_id + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain_id, ) with self.test_client() as c: c.delete( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain_id, group['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) - def test_cannot_revoke_grant_from_group_own_domain_on_own_domain_with_role_other_domain(self): # noqa: E501 + def test_cannot_revoke_grant_from_group_own_domain_on_own_domain_with_role_other_domain( + self, + ): # noqa: E501 group_domain_id = self.domain_id domain_id = self.domain_id role_domain_id = CONF.identity.default_domain_id role = PROVIDERS.role_api.create_role( - uuid.uuid4().hex, - unit.new_role_ref(domain_id=role_domain_id)) + uuid.uuid4().hex, unit.new_role_ref(domain_id=role_domain_id) + ) group = PROVIDERS.identity_api.create_group( unit.new_group_ref(domain_id=group_domain_id) ) 
PROVIDERS.assignment_api.create_grant( - role['id'], group_id=group['id'], - domain_id=domain_id + role['id'], group_id=group['id'], domain_id=domain_id ) with self.test_client() as c: c.delete( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain_id, group['id'], role['id'] - ), + '/v3/domains/%s/groups/%s/roles/%s' + % (domain_id, group['id'], role['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserGrantTests, - _SystemMemberAndReaderGrantTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserGrantTests, + _SystemMemberAndReaderGrantTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -1593,16 +1687,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -1613,10 +1706,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserGrantTests, - _SystemMemberAndReaderGrantTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserGrantTests, + _SystemMemberAndReaderGrantTests, +): def setUp(self): 
super(SystemMemberTests, self).setUp() @@ -1627,16 +1722,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -1647,9 +1741,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserGrantTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserGrantTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -1661,7 +1757,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -1677,17 +1773,19 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.put( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), - 
headers=self.headers + headers=self.headers, ) def test_can_create_grant_for_user_on_domain(self): @@ -1701,10 +1799,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain['id'], user['id'], self.bootstrapper.reader_role_id - ), - headers=self.headers + '/v3/domains/%s/users/%s/roles/%s' + % (domain['id'], user['id'], self.bootstrapper.reader_role_id), + headers=self.headers, ) def test_can_create_grant_for_group_on_project(self): @@ -1713,19 +1810,19 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.put( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def test_can_create_grant_for_group_on_domain(self): @@ -1739,10 +1836,13 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain['id'], group['id'], self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + domain['id'], + group['id'], + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def test_can_revoke_grant_from_user_on_project(self): @@ -1751,22 +1851,25 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - 
self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def test_can_revoke_grant_from_user_on_domain(self): @@ -1779,16 +1882,16 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.delete( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain['id'], user['id'], self.bootstrapper.reader_role_id - ), - headers=self.headers + '/v3/domains/%s/users/%s/roles/%s' + % (domain['id'], user['id'], self.bootstrapper.reader_role_id), + headers=self.headers, ) def test_can_revoke_grant_from_group_on_project(self): @@ -1797,24 +1900,25 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def 
test_can_revoke_grant_from_group_on_domain(self): @@ -1827,16 +1931,20 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.delete( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain['id'], group['id'], self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + domain['id'], + group['id'], + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) @@ -1848,18 +1956,20 @@ class _DomainMemberAndReaderTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.put( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_user_on_domain(self): @@ -1873,11 +1983,10 @@ class _DomainMemberAndReaderTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain['id'], user['id'], self.bootstrapper.reader_role_id - ), + '/v3/domains/%s/users/%s/roles/%s' + % (domain['id'], user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_group_on_project(self): @@ -1891,13 +2000,14 @@ class _DomainMemberAndReaderTests(object): with self.test_client() as c: c.put( - 
'/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_create_grant_for_group_on_domain(self): @@ -1911,11 +2021,14 @@ class _DomainMemberAndReaderTests(object): with self.test_client() as c: c.put( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain['id'], group['id'], self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + domain['id'], + group['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_user_on_project(self): @@ -1928,17 +2041,21 @@ class _DomainMemberAndReaderTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_user_on_domain(self): @@ -1951,17 +2068,17 @@ class _DomainMemberAndReaderTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.delete( - '/v3/domains/%s/users/%s/roles/%s' % ( - domain['id'], user['id'], self.bootstrapper.reader_role_id - ), + 
'/v3/domains/%s/users/%s/roles/%s' + % (domain['id'], user['id'], self.bootstrapper.reader_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_group_on_project(self): @@ -1970,25 +2087,26 @@ class _DomainMemberAndReaderTests(object): ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, unit.new_project_ref( - domain_id=CONF.identity.default_domain_id - ) + uuid.uuid4().hex, + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_cannot_revoke_grant_from_group_on_domain(self): @@ -2001,24 +2119,30 @@ class _DomainMemberAndReaderTests(object): ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.delete( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain['id'], group['id'], self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + domain['id'], + group['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class DomainReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUserTests, - _DomainMemberAndReaderTests): +class 
DomainReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserTests, + _DomainMemberAndReaderTests, +): def setUp(self): super(DomainReaderTests, self).setUp() @@ -2033,13 +2157,15 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, domain_user = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.user_id, + password=domain_user['password'], + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -2050,10 +2176,12 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUserTests, - _DomainMemberAndReaderTests): +class DomainMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserTests, + _DomainMemberAndReaderTests, +): def setUp(self): super(DomainMemberTests, self).setUp() @@ -2067,13 +2195,15 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, domain_user = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.user_id, + password=domain_user['password'], + 
domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -2084,9 +2214,11 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUserTests): +class DomainAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserTests, +): def setUp(self): super(DomainAdminTests, self).setUp() @@ -2108,14 +2240,15 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -2139,7 +2272,7 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, 'identity:list_grants': gp.SYSTEM_READER_OR_DOMAIN_READER_LIST, 'identity:check_grant': gp.SYSTEM_READER_OR_DOMAIN_READER, 'identity:create_grant': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, - 'identity:revoke_grant': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN + 'identity:revoke_grant': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, } f.write(jsonutils.dumps(overridden_policies)) @@ -2154,10 +2287,13 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def 
test_can_create_grant_for_user_own_domain_on_own_domain(self): @@ -2167,11 +2303,13 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/domains/%s/users/%s/roles/%s' % ( - self.domain_id, user['id'], - self.bootstrapper.reader_role_id + '/v3/domains/%s/users/%s/roles/%s' + % ( + self.domain_id, + user['id'], + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def test_can_create_grant_for_group_on_project(self): @@ -2185,12 +2323,13 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def test_can_create_grant_for_group_own_domain_on_own_domain(self): @@ -2200,11 +2339,13 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/domains/%s/groups/%s/roles/%s' % ( - self.domain_id, group['id'], - self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + self.domain_id, + group['id'], + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def test_can_revoke_grant_from_user_on_project(self): @@ -2217,16 +2358,20 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/users/%s/roles/%s' % ( - project['id'], user['id'], self.bootstrapper.reader_role_id + '/v3/projects/%s/users/%s/roles/%s' + % ( + project['id'], + user['id'], + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def 
test_can_revoke_grant_from_group_on_project(self): @@ -2239,18 +2384,20 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + project_id=project['id'], ) with self.test_client() as c: c.delete( - '/v3/projects/%s/groups/%s/roles/%s' % ( + '/v3/projects/%s/groups/%s/roles/%s' + % ( project['id'], group['id'], - self.bootstrapper.reader_role_id + self.bootstrapper.reader_role_id, ), - headers=self.headers + headers=self.headers, ) def test_cannot_revoke_grant_from_group_on_domain(self): @@ -2263,15 +2410,19 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, group_id=group['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + group_id=group['id'], + domain_id=domain['id'], ) with self.test_client() as c: c.delete( - '/v3/domains/%s/groups/%s/roles/%s' % ( - domain['id'], group['id'], self.bootstrapper.reader_role_id + '/v3/domains/%s/groups/%s/roles/%s' + % ( + domain['id'], + group['id'], + self.bootstrapper.reader_role_id, ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) diff --git a/keystone/tests/protection/v3/test_groups.py b/keystone/tests/protection/v3/test_groups.py index f91e34ceb9..35c22b6374 100644 --- a/keystone/tests/protection/v3/test_groups.py +++ b/keystone/tests/protection/v3/test_groups.py @@ -90,9 +90,7 @@ class _SystemUserGroupTests(object): PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - r = c.get( - '/v3/users/%s/groups' % user['id'], headers=self.headers - ) + r = c.get('/v3/users/%s/groups' % user['id'], headers=self.headers) self.assertEqual(1, len(r.json['groups'])) self.assertEqual(group['id'], r.json['groups'][0]['id']) @@ 
-113,14 +111,15 @@ class _SystemUserGroupTests(object): c.get( '/v3/groups/%s/users/%s' % (group['id'], user['id']), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_user_cannot_get_non_existent_group_not_found(self): with self.test_client() as c: c.get( - '/v3/groups/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + '/v3/groups/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, ) @@ -133,16 +132,15 @@ class _SystemAndDomainMemberAndReaderGroupTests(object): ) create = { - 'group': { - 'name': uuid.uuid4().hex, - 'domain_id': domain['id'] - } + 'group': {'name': uuid.uuid4().hex, 'domain_id': domain['id']} } with self.test_client() as c: c.post( - '/v3/groups', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/groups', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_group(self): @@ -157,9 +155,10 @@ class _SystemAndDomainMemberAndReaderGroupTests(object): with self.test_client() as c: c.patch( - '/v3/groups/%s' % group['id'], json=update, + '/v3/groups/%s' % group['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_group(self): @@ -172,8 +171,9 @@ class _SystemAndDomainMemberAndReaderGroupTests(object): with self.test_client() as c: c.delete( - '/v3/groups/%s' % group['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/groups/%s' % group['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_add_users_to_group(self): @@ -191,7 +191,7 @@ class _SystemAndDomainMemberAndReaderGroupTests(object): c.put( '/v3/groups/%s/users/%s' % (group['id'], user['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + 
expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_remove_users_from_group(self): @@ -211,14 +211,16 @@ class _SystemAndDomainMemberAndReaderGroupTests(object): c.delete( '/v3/groups/%s/users/%s' % (group['id'], user['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserGroupTests, - _SystemAndDomainMemberAndReaderGroupTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserGroupTests, + _SystemAndDomainMemberAndReaderGroupTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -229,16 +231,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -249,10 +250,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserGroupTests, - _SystemAndDomainMemberAndReaderGroupTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserGroupTests, + _SystemAndDomainMemberAndReaderGroupTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -263,16 +266,15 @@ class 
SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -283,9 +285,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserGroupTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserGroupTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -297,7 +301,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -313,10 +317,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) create = { - 'group': { - 'name': uuid.uuid4().hex, - 'domain_id': domain['id'] - } + 'group': {'name': uuid.uuid4().hex, 'domain_id': domain['id']} } with self.test_client() as c: @@ -334,8 +335,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/groups/%s' % group['id'], json=update, - headers=self.headers + '/v3/groups/%s' % group['id'], + json=update, + headers=self.headers, ) def test_user_can_delete_group(self): @@ -347,9 +349,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) 
with self.test_client() as c: - c.delete( - '/v3/groups/%s' % group['id'], headers=self.headers - ) + c.delete('/v3/groups/%s' % group['id'], headers=self.headers) def test_user_can_add_users_to_group(self): domain = PROVIDERS.resource_api.create_domain( @@ -365,7 +365,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( '/v3/groups/%s/users/%s' % (group['id'], user['id']), - headers=self.headers + headers=self.headers, ) def test_user_can_remove_users_from_group(self): @@ -384,7 +384,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( '/v3/groups/%s/users/%s' % (group['id'], user['id']), - headers=self.headers + headers=self.headers, ) @@ -419,8 +419,9 @@ class _DomainUserGroupTests(object): unit.new_group_ref(domain_id=domain['id']) ) with self.test_client() as c: - r = c.get('/v3/groups?domain_id=%s' % domain['id'], - headers=self.headers) + r = c.get( + '/v3/groups?domain_id=%s' % domain['id'], headers=self.headers + ) self.assertEqual(0, len(r.json['groups'])) def test_user_can_get_group_in_domain(self): @@ -428,8 +429,7 @@ class _DomainUserGroupTests(object): unit.new_group_ref(domain_id=self.domain_id) ) with self.test_client() as c: - r = c.get('/v3/groups/%s' % group['id'], - headers=self.headers) + r = c.get('/v3/groups/%s' % group['id'], headers=self.headers) self.assertEqual(group['id'], r.json['group']['id']) def test_user_cannot_get_group_in_other_domain(self): @@ -440,15 +440,18 @@ class _DomainUserGroupTests(object): unit.new_group_ref(domain_id=domain['id']) ) with self.test_client() as c: - c.get('/v3/groups/%s' % group['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/groups/%s' % group['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_non_existent_group_forbidden(self): with self.test_client() as c: c.get( - '/v3/groups/%s' % uuid.uuid4().hex, 
headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/groups/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_list_groups_in_domain_for_user_in_domain(self): @@ -460,8 +463,7 @@ class _DomainUserGroupTests(object): ) PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - r = c.get('/v3/users/%s/groups' % user['id'], - headers=self.headers) + r = c.get('/v3/users/%s/groups' % user['id'], headers=self.headers) self.assertEqual(1, len(r.json['groups'])) self.assertEqual(group['id'], r.json['groups'][0]['id']) @@ -477,15 +479,19 @@ class _DomainUserGroupTests(object): ) PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - c.get('/v3/users/%s/groups' % user['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/users/%s/groups' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_list_groups_for_non_existent_user_forbidden(self): with self.test_client() as c: - c.get('/v3/users/%s/groups' % uuid.uuid4().hex, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/users/%s/groups' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_list_groups_in_other_domain_user_in_own_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -505,8 +511,7 @@ class _DomainUserGroupTests(object): PROVIDERS.identity_api.add_user_to_group(user['id'], group1['id']) PROVIDERS.identity_api.add_user_to_group(user['id'], group2['id']) with self.test_client() as c: - r = c.get('/v3/users/%s/groups' % user['id'], - headers=self.headers) + r = c.get('/v3/users/%s/groups' % user['id'], headers=self.headers) # only one group should be visible self.assertEqual(1, len(r.json['groups'])) self.assertEqual(group2['id'], 
r.json['groups'][0]['id']) @@ -520,8 +525,9 @@ class _DomainUserGroupTests(object): ) PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - r = c.get('/v3/groups/%s/users' % group['id'], - headers=self.headers) + r = c.get( + '/v3/groups/%s/users' % group['id'], headers=self.headers + ) self.assertEqual(1, len(r.json['users'])) self.assertEqual(user['id'], r.json['users'][0]['id']) @@ -543,8 +549,9 @@ class _DomainUserGroupTests(object): PROVIDERS.identity_api.add_user_to_group(user1['id'], group['id']) PROVIDERS.identity_api.add_user_to_group(user2['id'], group['id']) with self.test_client() as c: - r = c.get('/v3/groups/%s/users' % group['id'], - headers=self.headers) + r = c.get( + '/v3/groups/%s/users' % group['id'], headers=self.headers + ) # only one user should be visible self.assertEqual(1, len(r.json['users'])) self.assertEqual(user2['id'], r.json['users'][0]['id']) @@ -561,15 +568,19 @@ class _DomainUserGroupTests(object): ) PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - c.get('/v3/groups/%s/users' % group['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/groups/%s/users' % group['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_list_users_in_non_existent_group_forbidden(self): with self.test_client() as c: - c.get('/v3/groups/%s/users' % uuid.uuid4().hex, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/groups/%s/users' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_can_check_user_in_own_domain_group_in_own_domain(self): user = PROVIDERS.identity_api.create_user( @@ -580,14 +591,18 @@ class _DomainUserGroupTests(object): ) PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - c.head('/v3/groups/%(group)s/users/%(user)s' % { - 
'group': group['id'], 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.NO_CONTENT) - c.get('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.NO_CONTENT) + c.head( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) + c.get( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) def test_user_cannot_check_user_in_other_domain_group_in_own_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -601,20 +616,26 @@ class _DomainUserGroupTests(object): ) PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - c.head('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) - c.get('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.head( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) + c.get( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) -class DomainReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUserGroupTests, - _SystemAndDomainMemberAndReaderGroupTests): +class DomainReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserGroupTests, + _SystemAndDomainMemberAndReaderGroupTests, +): def setUp(self): super(DomainReaderTests, self).setUp() @@ -636,14 +657,15 @@ class 
DomainReaderTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -666,20 +688,19 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, overridden_policies = { 'identity:get_group': gp.SYSTEM_READER_OR_DOMAIN_READER, 'identity:list_groups': gp.SYSTEM_READER_OR_DOMAIN_READER, - 'identity:list_groups_for_user': - gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER, - 'identity:list_users_in_group': - gp.SYSTEM_READER_OR_DOMAIN_READER, - 'identity:check_user_in_group': - gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER + 'identity:list_groups_for_user': gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER, + 'identity:list_users_in_group': gp.SYSTEM_READER_OR_DOMAIN_READER, + 'identity:check_user_in_group': gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER, } f.write(jsonutils.dumps(overridden_policies)) -class DomainMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUserGroupTests, - _SystemAndDomainMemberAndReaderGroupTests): +class DomainMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserGroupTests, + _SystemAndDomainMemberAndReaderGroupTests, +): def setUp(self): super(DomainMemberTests, self).setUp() @@ -701,14 +722,15 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] 
PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -731,19 +753,18 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, overridden_policies = { 'identity:get_group': gp.SYSTEM_READER_OR_DOMAIN_READER, 'identity:list_groups': gp.SYSTEM_READER_OR_DOMAIN_READER, - 'identity:list_groups_for_user': - gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER, - 'identity:list_users_in_group': - gp.SYSTEM_READER_OR_DOMAIN_READER, - 'identity:check_user_in_group': - gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER + 'identity:list_groups_for_user': gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER, + 'identity:list_users_in_group': gp.SYSTEM_READER_OR_DOMAIN_READER, + 'identity:check_user_in_group': gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER, } f.write(jsonutils.dumps(overridden_policies)) -class DomainAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUserGroupTests): +class DomainAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserGroupTests, +): def setUp(self): super(DomainAdminTests, self).setUp() @@ -765,14 +786,15 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, 
password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -795,28 +817,20 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, overridden_policies = { 'identity:get_group': gp.SYSTEM_READER_OR_DOMAIN_READER, 'identity:list_groups': gp.SYSTEM_READER_OR_DOMAIN_READER, - 'identity:list_groups_for_user': - gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER, + 'identity:list_groups_for_user': gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_USER_OR_OWNER, 'identity:create_group': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, 'identity:update_group': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, 'identity:delete_group': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, - 'identity:list_users_in_group': - gp.SYSTEM_READER_OR_DOMAIN_READER, - 'identity:remove_user_from_group': - gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN_FOR_TARGET_GROUP_USER, - 'identity:check_user_in_group': - gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER, - 'identity:add_user_to_group': - gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN_FOR_TARGET_GROUP_USER + 'identity:list_users_in_group': gp.SYSTEM_READER_OR_DOMAIN_READER, + 'identity:remove_user_from_group': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN_FOR_TARGET_GROUP_USER, + 'identity:check_user_in_group': gp.SYSTEM_READER_OR_DOMAIN_READER_FOR_TARGET_GROUP_USER, + 'identity:add_user_to_group': gp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN_FOR_TARGET_GROUP_USER, } f.write(jsonutils.dumps(overridden_policies)) def test_user_can_create_group_for_own_domain(self): create = { - 'group': { - 'name': uuid.uuid4().hex, - 'domain_id': self.domain_id - } + 'group': {'name': uuid.uuid4().hex, 'domain_id': self.domain_id} } with self.test_client() as c: @@ -828,15 +842,16 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) create = { - 'group': { - 'name': uuid.uuid4().hex, - 'domain_id': domain['id'] - } + 'group': {'name': uuid.uuid4().hex, 'domain_id': domain['id']} } with self.test_client() as c: - 
c.post('/v3/groups', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.post( + '/v3/groups', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_can_update_group_in_own_domain(self): group = PROVIDERS.identity_api.create_group( @@ -846,8 +861,10 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, update = {'group': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/groups/%s' % group['id'], json=update, - headers=self.headers) + '/v3/groups/%s' % group['id'], + json=update, + headers=self.headers, + ) def test_user_cannot_update_group_in_other_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -860,9 +877,10 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, update = {'group': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/groups/%s' % group['id'], json=update, + '/v3/groups/%s' % group['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_delete_group_in_own_domain(self): @@ -870,10 +888,7 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, unit.new_group_ref(domain_id=self.domain_id) ) with self.test_client() as c: - c.delete( - '/v3/groups/%s' % group['id'], - headers=self.headers - ) + c.delete('/v3/groups/%s' % group['id'], headers=self.headers) def test_user_cannot_delete_group_in_other_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -886,7 +901,7 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, c.delete( '/v3/groups/%s' % group['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_remove_user_in_own_domain_from_group_in_own_domain(self): @@ -898,9 +913,11 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) 
PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - c.delete('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers) + c.delete( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + ) def test_user_cannot_remove_user_other_domain_from_group_own_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -914,10 +931,12 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - c.delete('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_remove_user_own_domain_from_group_other_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -931,30 +950,36 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) PROVIDERS.identity_api.add_user_to_group(user['id'], group['id']) with self.test_client() as c: - c.delete('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_remove_non_existent_user_from_group_forbidden(self): group = PROVIDERS.identity_api.create_group( unit.new_group_ref(domain_id=self.domain_id) ) with self.test_client() as c: - c.delete('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': uuid.uuid4().hex}, - headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': uuid.uuid4().hex}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_remove_user_from_non_existent_group_forbidden(self): user = PROVIDERS.identity_api.create_user( unit.new_user_ref(domain_id=self.domain_id) ) with self.test_client() as c: - c.delete('/v3/groups/%(group)s/users/%(user)s' % { - 'group': uuid.uuid4().hex, 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': uuid.uuid4().hex, 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_can_add_user_in_own_domain_to_group_in_own_domain(self): group = PROVIDERS.identity_api.create_group( @@ -964,9 +989,11 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, unit.new_user_ref(domain_id=self.domain_id) ) with self.test_client() as c: - c.put('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers) + c.put( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + ) def test_user_cannot_add_user_other_domain_to_group_own_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -979,10 +1006,12 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, unit.new_user_ref(domain_id=domain['id']) ) with self.test_client() as c: - c.put('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.put( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_add_user_own_domain_to_group_other_domain(self): domain = 
PROVIDERS.resource_api.create_domain( @@ -995,34 +1024,41 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, unit.new_user_ref(domain_id=self.domain_id) ) with self.test_client() as c: - c.put('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.put( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_add_non_existent_user_to_group_forbidden(self): group = PROVIDERS.identity_api.create_group( unit.new_group_ref(domain_id=self.domain_id) ) with self.test_client() as c: - c.put('/v3/groups/%(group)s/users/%(user)s' % { - 'group': group['id'], 'user': uuid.uuid4().hex}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.put( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': group['id'], 'user': uuid.uuid4().hex}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_add_user_from_non_existent_group_forbidden(self): user = PROVIDERS.identity_api.create_user( unit.new_user_ref(domain_id=self.domain_id) ) with self.test_client() as c: - c.put('/v3/groups/%(group)s/users/%(user)s' % { - 'group': uuid.uuid4().hex, 'user': user['id']}, - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.put( + '/v3/groups/%(group)s/users/%(user)s' + % {'group': uuid.uuid4().hex, 'user': user['id']}, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, common_auth.AuthTestMixin +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -1042,14 +1078,15 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) 
PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=project['id'], ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=project['id'] + project_id=project['id'], ) # Grab a token using the persona we're testing and prepare headers @@ -1088,8 +1125,9 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.get( - '/v3/users/%s/groups' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s/groups' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_groups(self): @@ -1102,8 +1140,9 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.get( - '/v3/groups', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/groups', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_a_group(self): @@ -1116,8 +1155,9 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.get( - '/v3/groups/%s' % group['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/groups/%s' % group['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_group_members(self): @@ -1135,8 +1175,9 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.get( - '/v3/groups/%s/users' % group['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/groups/%s/users' % group['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_check_if_user_in_group(self): @@ -1156,12 +1197,13 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, c.get( 
'/v3/groups/%s/users/%s' % (group['id'], user['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_non_existent_group_forbidden(self): with self.test_client() as c: c.get( - '/v3/groups/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/groups/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) diff --git a/keystone/tests/protection/v3/test_identity_providers.py b/keystone/tests/protection/v3/test_identity_providers.py index 8c42d3483a..0b36013e15 100644 --- a/keystone/tests/protection/v3/test_identity_providers.py +++ b/keystone/tests/protection/v3/test_identity_providers.py @@ -51,7 +51,7 @@ class _SystemUserIdentityProviderTests(object): with self.test_client() as c: c.get( '/v3/OS-FEDERATION/identity_providers/%s' % idp['id'], - headers=self.headers + headers=self.headers, ) @@ -64,8 +64,9 @@ class _SystemReaderAndMemberIdentityProviderTests(object): with self.test_client() as c: c.put( '/v3/OS-FEDERATION/identity_providers/%s' % uuid.uuid4().hex, - json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_identity_providers(self): @@ -78,8 +79,9 @@ class _SystemReaderAndMemberIdentityProviderTests(object): with self.test_client() as c: c.patch( '/v3/OS-FEDERATION/identity_providers/%s' % idp['id'], - json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_identity_providers(self): @@ -91,7 +93,7 @@ class _SystemReaderAndMemberIdentityProviderTests(object): c.delete( '/v3/OS-FEDERATION/identity_providers/%s' % idp['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + 
expected_status_code=http.client.FORBIDDEN, ) @@ -104,8 +106,9 @@ class _DomainAndProjectUserIdentityProviderTests(object): with self.test_client() as c: c.put( '/v3/OS-FEDERATION/identity_providers/%s' % uuid.uuid4().hex, - json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_identity_providers(self): @@ -118,8 +121,9 @@ class _DomainAndProjectUserIdentityProviderTests(object): with self.test_client() as c: c.patch( '/v3/OS-FEDERATION/identity_providers/%s' % idp['id'], - json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_identity_providers(self): @@ -129,8 +133,9 @@ class _DomainAndProjectUserIdentityProviderTests(object): with self.test_client() as c: c.get( - '/v3/OS-FEDERATION/identity_providers', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/OS-FEDERATION/identity_providers', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_an_identity_provider(self): @@ -142,7 +147,7 @@ class _DomainAndProjectUserIdentityProviderTests(object): c.get( '/v3/OS-FEDERATION/identity_providers/%s' % idp['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_identity_providers(self): @@ -154,14 +159,16 @@ class _DomainAndProjectUserIdentityProviderTests(object): c.delete( '/v3/OS-FEDERATION/identity_providers/%s' % idp['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserIdentityProviderTests, - _SystemReaderAndMemberIdentityProviderTests): +class SystemReaderTests( 
+ base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserIdentityProviderTests, + _SystemReaderAndMemberIdentityProviderTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -172,16 +179,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -192,10 +198,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserIdentityProviderTests, - _SystemReaderAndMemberIdentityProviderTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserIdentityProviderTests, + _SystemReaderAndMemberIdentityProviderTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -206,16 +214,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + 
password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -226,9 +233,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserIdentityProviderTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserIdentityProviderTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -242,7 +251,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -258,8 +267,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( '/v3/OS-FEDERATION/identity_providers/%s' % uuid.uuid4().hex, - json=create, headers=self.headers, - expected_status_code=http.client.CREATED + json=create, + headers=self.headers, + expected_status_code=http.client.CREATED, ) def test_user_can_update_identity_providers(self): @@ -272,7 +282,8 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( '/v3/OS-FEDERATION/identity_providers/%s' % idp['id'], - json=update, headers=self.headers + json=update, + headers=self.headers, ) def test_user_can_delete_identity_providers(self): @@ -283,13 +294,15 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( '/v3/OS-FEDERATION/identity_providers/%s' % idp['id'], - headers=self.headers + headers=self.headers, ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserIdentityProviderTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + 
_DomainAndProjectUserIdentityProviderTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -304,14 +317,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -322,9 +336,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserIdentityProviderTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserIdentityProviderTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -336,7 +352,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -348,9 +364,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserIdentityProviderTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserIdentityProviderTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -372,14 +389,15 @@ 
class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_implied_roles.py b/keystone/tests/protection/v3/test_implied_roles.py index f27e648151..eefc416cce 100644 --- a/keystone/tests/protection/v3/test_implied_roles.py +++ b/keystone/tests/protection/v3/test_implied_roles.py @@ -37,36 +37,42 @@ class _SystemUserImpliedRoleTests(object): """Common default functionality for all system users.""" def test_user_can_list_implied_roles(self): - PROVIDERS.role_api.create_implied_role(self.prior_role_id, - self.implied_role_id) + PROVIDERS.role_api.create_implied_role( + self.prior_role_id, self.implied_role_id + ) with self.test_client() as c: - r = c.get('/v3/roles/%s/implies' % self.prior_role_id, - headers=self.headers) + r = c.get( + '/v3/roles/%s/implies' % self.prior_role_id, + headers=self.headers, + ) self.assertEqual(1, len(r.json['role_inference']['implies'])) def test_user_can_get_an_implied_role(self): - PROVIDERS.role_api.create_implied_role(self.prior_role_id, - self.implied_role_id) + PROVIDERS.role_api.create_implied_role( + self.prior_role_id, self.implied_role_id + ) with self.test_client() as c: c.get( - '/v3/roles/%s/implies/%s' % ( - self.prior_role_id, self.implied_role_id), - headers=self.headers) - c.head( - '/v3/roles/%s/implies/%s' % ( - self.prior_role_id, self.implied_role_id), + '/v3/roles/%s/implies/%s' + % (self.prior_role_id, self.implied_role_id), headers=self.headers, - expected_status_code=http.client.NO_CONTENT) + ) + c.head( + '/v3/roles/%s/implies/%s' + % 
(self.prior_role_id, self.implied_role_id), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) def test_user_can_list_role_inference_rules(self): - PROVIDERS.role_api.create_implied_role(self.prior_role_id, - self.implied_role_id) + PROVIDERS.role_api.create_implied_role( + self.prior_role_id, self.implied_role_id + ) with self.test_client() as c: - r = c.get('/v3/role_inferences', - headers=self.headers) + r = c.get('/v3/role_inferences', headers=self.headers) # There should be three role inferences: two from the defaults and # one from the test setup self.assertEqual(3, len(r.json['role_inferences'])) @@ -78,30 +84,33 @@ class _SystemReaderAndMemberImpliedRoleTests(object): def test_user_cannot_create_implied_roles(self): with self.test_client() as c: c.put( - '/v3/roles/%s/implies/%s' % ( - self.prior_role_id, self.implied_role_id), + '/v3/roles/%s/implies/%s' + % (self.prior_role_id, self.implied_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_implied_roles(self): - PROVIDERS.role_api.create_implied_role(self.prior_role_id, - self.implied_role_id) + PROVIDERS.role_api.create_implied_role( + self.prior_role_id, self.implied_role_id + ) with self.test_client() as c: c.delete( - '/v3/roles/%s/implies/%s' % ( - self.prior_role_id, self.implied_role_id), + '/v3/roles/%s/implies/%s' + % (self.prior_role_id, self.implied_role_id), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ImpliedRolesSetupMixin, - _SystemUserImpliedRoleTests, - _SystemReaderAndMemberImpliedRoleTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ImpliedRolesSetupMixin, + _SystemUserImpliedRoleTests, + _SystemReaderAndMemberImpliedRoleTests, +): def setUp(self): 
super(SystemReaderTests, self).setUp() @@ -114,16 +123,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -134,11 +142,13 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ImpliedRolesSetupMixin, - _SystemUserImpliedRoleTests, - _SystemReaderAndMemberImpliedRoleTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ImpliedRolesSetupMixin, + _SystemUserImpliedRoleTests, + _SystemReaderAndMemberImpliedRoleTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -151,16 +161,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -171,10 +180,12 @@ class 
SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ImpliedRolesSetupMixin, - _SystemUserImpliedRoleTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ImpliedRolesSetupMixin, + _SystemUserImpliedRoleTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -190,7 +201,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -203,19 +214,20 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_can_create_implied_roles(self): with self.test_client() as c: c.put( - '/v3/roles/%s/implies/%s' % ( - self.prior_role_id, self.implied_role_id), + '/v3/roles/%s/implies/%s' + % (self.prior_role_id, self.implied_role_id), headers=self.headers, - expected_status_code=http.client.CREATED + expected_status_code=http.client.CREATED, ) def test_user_can_delete_implied_roles(self): - PROVIDERS.role_api.create_implied_role(self.prior_role_id, - self.implied_role_id) + PROVIDERS.role_api.create_implied_role( + self.prior_role_id, self.implied_role_id + ) with self.test_client() as c: c.delete( - '/v3/roles/%s/implies/%s' % ( - self.prior_role_id, self.implied_role_id), - headers=self.headers + '/v3/roles/%s/implies/%s' + % (self.prior_role_id, self.implied_role_id), + headers=self.headers, ) diff --git a/keystone/tests/protection/v3/test_limits.py b/keystone/tests/protection/v3/test_limits.py index dd1e579773..5c4e9fb05f 100644 --- a/keystone/tests/protection/v3/test_limits.py +++ b/keystone/tests/protection/v3/test_limits.py @@ -37,17 +37,17 @@ def _create_limits_and_dependencies(domain_id=None): registered_limit = unit.new_registered_limit_ref( 
service_id=service['id'], id=uuid.uuid4().hex ) - registered_limits = ( - PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit] - ) + registered_limits = PROVIDERS.unified_limit_api.create_registered_limits( + [registered_limit] ) registered_limit = registered_limits[0] domain_limit = unit.new_limit_ref( - domain_id=domain_id, service_id=service['id'], + domain_id=domain_id, + service_id=service['id'], resource_name=registered_limit['resource_name'], - resource_limit=10, id=uuid.uuid4().hex + resource_limit=10, + id=uuid.uuid4().hex, ) project = PROVIDERS.resource_api.create_project( @@ -55,9 +55,11 @@ def _create_limits_and_dependencies(domain_id=None): ) project_limit = unit.new_limit_ref( - project_id=project['id'], service_id=service['id'], + project_id=project['id'], + service_id=service['id'], resource_name=registered_limit['resource_name'], - resource_limit=5, id=uuid.uuid4().hex + resource_limit=5, + id=uuid.uuid4().hex, ) limits = PROVIDERS.unified_limit_api.create_limits( [domain_limit, project_limit] @@ -118,23 +120,26 @@ class _UserLimitTests(object): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) create = { 'limits': [ unit.new_limit_ref( - project_id=project['id'], service_id=service['id'], + project_id=project['id'], + service_id=service['id'], resource_name=registered_limit['resource_name'], - resource_limit=5 + resource_limit=5, ) ] } with self.test_client() as c: c.post( - '/v3/limits', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_limits(self): @@ -144,9 +149,10 @@ class _UserLimitTests(object): with self.test_client() as c: c.patch( - '/v3/limits/%s' % limit_id, json=update, + '/v3/limits/%s' % limit_id, + 
json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_limits(self): @@ -156,13 +162,15 @@ class _UserLimitTests(object): c.delete( '/v3/limits/%s' % limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserLimitTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserLimitTests, +): def setUp(self): super(SystemReaderTests, self).setUp() self.loadapp() @@ -172,16 +180,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -192,9 +199,11 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserLimitTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserLimitTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -205,16 +214,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - 
)['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -225,8 +233,9 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, common_auth.AuthTestMixin +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -240,7 +249,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -287,15 +296,16 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) create = { 'limits': [ unit.new_limit_ref( - project_id=project['id'], service_id=service['id'], + project_id=project['id'], + service_id=service['id'], resource_name=registered_limit['resource_name'], - resource_limit=5 + resource_limit=5, ) ] } @@ -310,8 +320,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/limits/%s' % limit_id, json=update, - headers=self.headers + '/v3/limits/%s' % limit_id, json=update, headers=self.headers ) def test_user_can_delete_limits(self): @@ -321,8 +330,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 
c.delete('/v3/limits/%s' % limit_id, headers=self.headers) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, common_auth.AuthTestMixin +): def setUp(self): super(DomainUserTests, self).setUp() @@ -337,14 +347,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -376,8 +387,9 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.get( - '/v3/limits/%s' % project_limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % project_limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_domain_limits_for_other_domain(self): @@ -385,8 +397,9 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.get( - '/v3/limits/%s' % domain_limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % domain_limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_list_limits_within_domain(self): @@ -429,17 +442,20 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, create = { 'limits': [ unit.new_limit_ref( - domain_id=self.domain_id, service_id=service['id'], + domain_id=self.domain_id, + service_id=service['id'], resource_name=registered_limit['resource_name'], - 
resource_limit=5 + resource_limit=5, ) ] } with self.test_client() as c: c.post( - '/v3/limits', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_limits_for_other_domain(self): @@ -463,15 +479,17 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_id=CONF.identity.default_domain_id, service_id=service['id'], resource_name=registered_limit['resource_name'], - resource_limit=5 + resource_limit=5, ) ] } with self.test_client() as c: c.post( - '/v3/limits', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_limits_for_projects_in_domain(self): @@ -499,15 +517,17 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, project_id=project['id'], service_id=service['id'], resource_name=registered_limit['resource_name'], - resource_limit=5 + resource_limit=5, ) ] } with self.test_client() as c: c.post( - '/v3/limits', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_limits_for_projects_outside_domain(self): @@ -527,7 +547,7 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) create = { @@ -536,15 +556,17 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, project_id=project['id'], service_id=service['id'], resource_name=registered_limit['resource_name'], - resource_limit=5 + resource_limit=5, ) ] } with self.test_client() as c: c.post( - '/v3/limits', 
json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_limits_for_domain(self): @@ -556,9 +578,10 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/limits/%s' % domain_limit_id, json=update, + '/v3/limits/%s' % domain_limit_id, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_limits_for_other_domain(self): @@ -568,9 +591,10 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/limits/%s' % domain_limit_id, json=update, + '/v3/limits/%s' % domain_limit_id, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_limits_for_projects_in_domain(self): @@ -582,8 +606,10 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/limits/%s' % project_limit_id, headers=self.headers, - json=update, expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % project_limit_id, + headers=self.headers, + json=update, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_limits_for_projects_outside_domain(self): @@ -593,8 +619,10 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/limits/%s' % project_limit_id, headers=self.headers, - json=update, expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % project_limit_id, + headers=self.headers, + json=update, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_limits_for_domain(self): @@ -604,8 +632,9 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - 
'/v3/limits/%s' % domain_limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % domain_limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_limits_for_other_domain(self): @@ -613,8 +642,9 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - '/v3/limits/%s' % domain_limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % domain_limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_limits_for_projects_in_domain(self): @@ -624,8 +654,9 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - '/v3/limits/%s' % project_limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % project_limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_limits_for_projects_outside_domain(self): @@ -633,13 +664,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - '/v3/limits/%s' % project_limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % project_limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, common_auth.AuthTestMixin +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -653,7 +686,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -671,13 
+704,14 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, # project if they actually have a role assignment on the project and # call the API with a project-scoped token. PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=limit['project_id'] + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=limit['project_id'], ) auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=limit['project_id'] + project_id=limit['project_id'], ) with self.test_client() as c: r = c.post('/v3/auth/tokens', json=auth) @@ -692,8 +726,9 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.get( - '/v3/limits/%s' % project_limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % project_limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_domain_limit(self): @@ -701,8 +736,9 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.get( - '/v3/limits/%s' % domain_limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits/%s' % domain_limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_list_limits(self): @@ -713,13 +749,14 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, # project if they actually have a role assignment on the project and # call the API with a project-scoped token. 
PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=limit['project_id'] + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=limit['project_id'], ) auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=limit['project_id'] + project_id=limit['project_id'], ) with self.test_client() as c: r = c.post('/v3/auth/tokens', json=auth) @@ -759,23 +796,26 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) create = { 'limits': [ unit.new_limit_ref( - project_id=project['id'], service_id=service['id'], + project_id=project['id'], + service_id=service['id'], resource_name=registered_limit['resource_name'], - resource_limit=5 + resource_limit=5, ) ] } with self.test_client() as c: c.post( - '/v3/limits', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/limits', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_limits(self): @@ -785,9 +825,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/limits/%s' % limit_id, json=update, + '/v3/limits/%s' % limit_id, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_limits(self): @@ -797,7 +838,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, c.delete( '/v3/limits/%s' % limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) diff --git a/keystone/tests/protection/v3/test_mappings.py b/keystone/tests/protection/v3/test_mappings.py index 
98532af7ce..a1af1b6a73 100644 --- a/keystone/tests/protection/v3/test_mappings.py +++ b/keystone/tests/protection/v3/test_mappings.py @@ -48,7 +48,7 @@ class _SystemUserMappingTests(object): with self.test_client() as c: c.get( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], - headers=self.headers + headers=self.headers, ) @@ -59,19 +59,22 @@ class _SystemReaderAndMemberUserMappingTests(object): create = { 'mapping': { 'id': uuid.uuid4().hex, - 'rules': [{ - 'local': [{'user': {'name': '{0}'}}], - 'remote': [{'type': 'UserName'}], - }] + 'rules': [ + { + 'local': [{'user': {'name': '{0}'}}], + 'remote': [{'type': 'UserName'}], + } + ], } } mapping_id = create['mapping']['id'] with self.test_client() as c: c.put( - '/v3/OS-FEDERATION/mappings/%s' % mapping_id, json=create, + '/v3/OS-FEDERATION/mappings/%s' % mapping_id, + json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_mappings(self): @@ -82,18 +85,21 @@ class _SystemReaderAndMemberUserMappingTests(object): update = { 'mapping': { - 'rules': [{ - 'local': [{'user': {'name': '{0}'}}], - 'remote': [{'type': 'UserName'}], - }] + 'rules': [ + { + 'local': [{'user': {'name': '{0}'}}], + 'remote': [{'type': 'UserName'}], + } + ] } } with self.test_client() as c: c.patch( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], - json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_mappings(self): @@ -106,7 +112,7 @@ class _SystemReaderAndMemberUserMappingTests(object): c.delete( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -116,19 +122,22 @@ class _DomainAndProjectUserMappingTests(object): create = { 'mapping': { 'id': uuid.uuid4().hex, - 'rules': [{ - 'local': 
[{'user': {'name': '{0}'}}], - 'remote': [{'type': 'UserName'}], - }] + 'rules': [ + { + 'local': [{'user': {'name': '{0}'}}], + 'remote': [{'type': 'UserName'}], + } + ], } } mapping_id = create['mapping']['id'] with self.test_client() as c: c.put( - '/v3/OS-FEDERATION/mappings/%s' % mapping_id, json=create, + '/v3/OS-FEDERATION/mappings/%s' % mapping_id, + json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_mappings(self): @@ -139,8 +148,9 @@ class _DomainAndProjectUserMappingTests(object): with self.test_client() as c: c.get( - '/v3/OS-FEDERATION/mappings', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/OS-FEDERATION/mappings', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_a_mapping(self): @@ -153,7 +163,7 @@ class _DomainAndProjectUserMappingTests(object): c.get( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_mappings(self): @@ -164,18 +174,21 @@ class _DomainAndProjectUserMappingTests(object): update = { 'mapping': { - 'rules': [{ - 'local': [{'user': {'name': '{0}'}}], - 'remote': [{'type': 'UserName'}], - }] + 'rules': [ + { + 'local': [{'user': {'name': '{0}'}}], + 'remote': [{'type': 'UserName'}], + } + ] } } with self.test_client() as c: c.patch( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], - json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_mappings(self): @@ -188,14 +201,16 @@ class _DomainAndProjectUserMappingTests(object): c.delete( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + 
expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserMappingTests, - _SystemReaderAndMemberUserMappingTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserMappingTests, + _SystemReaderAndMemberUserMappingTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -206,16 +221,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -229,19 +243,22 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, create = { 'mapping': { 'id': uuid.uuid4().hex, - 'rules': [{ - 'local': [{'user': {'name': '{0}'}}], - 'remote': [{'type': 'UserName'}], - }] + 'rules': [ + { + 'local': [{'user': {'name': '{0}'}}], + 'remote': [{'type': 'UserName'}], + } + ], } } mapping_id = create['mapping']['id'] with self.test_client() as c: c.put( - '/v3/OS-FEDERATION/mappings/%s' % mapping_id, json=create, + '/v3/OS-FEDERATION/mappings/%s' % mapping_id, + json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_mappings(self): @@ -252,18 +269,21 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, update = { 'mapping': { - 'rules': [{ - 'local': [{'user': {'name': '{0}'}}], - 'remote': [{'type': 'UserName'}], - }] + 
'rules': [ + { + 'local': [{'user': {'name': '{0}'}}], + 'remote': [{'type': 'UserName'}], + } + ] } } with self.test_client() as c: c.patch( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], - json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_mappings(self): @@ -276,14 +296,16 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, c.delete( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserMappingTests, - _SystemReaderAndMemberUserMappingTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserMappingTests, + _SystemReaderAndMemberUserMappingTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -294,16 +316,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -314,9 +335,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserMappingTests): +class SystemAdminTests( + 
base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserMappingTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -330,7 +353,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -344,18 +367,22 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, create = { 'mapping': { 'id': uuid.uuid4().hex, - 'rules': [{ - 'local': [{'user': {'name': '{0}'}}], - 'remote': [{'type': 'UserName'}], - }] + 'rules': [ + { + 'local': [{'user': {'name': '{0}'}}], + 'remote': [{'type': 'UserName'}], + } + ], } } mapping_id = create['mapping']['id'] with self.test_client() as c: c.put( - '/v3/OS-FEDERATION/mappings/%s' % mapping_id, json=create, - headers=self.headers, expected_status_code=http.client.CREATED + '/v3/OS-FEDERATION/mappings/%s' % mapping_id, + json=create, + headers=self.headers, + expected_status_code=http.client.CREATED, ) def test_user_can_update_mappings(self): @@ -366,17 +393,20 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, update = { 'mapping': { - 'rules': [{ - 'local': [{'user': {'name': '{0}'}}], - 'remote': [{'type': 'UserName'}], - }] + 'rules': [ + { + 'local': [{'user': {'name': '{0}'}}], + 'remote': [{'type': 'UserName'}], + } + ] } } with self.test_client() as c: c.patch( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], - json=update, headers=self.headers + json=update, + headers=self.headers, ) def test_user_can_delete_mappings(self): @@ -388,13 +418,15 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( '/v3/OS-FEDERATION/mappings/%s' % mapping['id'], - headers=self.headers + headers=self.headers, ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserMappingTests): 
+class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserMappingTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -409,14 +441,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -427,9 +460,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserMappingTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserMappingTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -441,7 +476,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -453,9 +488,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserMappingTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserMappingTests, +): def setUp(self): 
super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -477,14 +513,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_policy.py b/keystone/tests/protection/v3/test_policy.py index 9b11fce20f..ff6585a50a 100644 --- a/keystone/tests/protection/v3/test_policy.py +++ b/keystone/tests/protection/v3/test_policy.py @@ -46,8 +46,7 @@ class _SystemUserPoliciesTests(object): policy = PROVIDERS.policy_api.create_policy(policy['id'], policy) with self.test_client() as c: - c.get('/v3/policies/%s' % policy['id'], - headers=self.headers) + c.get('/v3/policies/%s' % policy['id'], headers=self.headers) class _SystemReaderAndMemberPoliciesTests(object): @@ -60,13 +59,19 @@ class _SystemReaderAndMemberPoliciesTests(object): 'description': uuid.uuid4().hex, 'enabled': True, # Store serialized JSON data as the blob to mimic real world usage. 
- 'blob': json.dumps({'data': uuid.uuid4().hex, }), + 'blob': json.dumps( + { + 'data': uuid.uuid4().hex, + } + ), 'type': uuid.uuid4().hex, } with self.test_client() as c: c.post( - '/v3/policies', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/policies', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_policy(self): @@ -77,9 +82,10 @@ class _SystemReaderAndMemberPoliciesTests(object): with self.test_client() as c: c.patch( - '/v3/policies/%s' % policy['id'], json=update, + '/v3/policies/%s' % policy['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_policy(self): @@ -88,8 +94,9 @@ class _SystemReaderAndMemberPoliciesTests(object): with self.test_client() as c: c.delete( - '/v3/policies/%s' % policy['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/policies/%s' % policy['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -100,16 +107,22 @@ class _DomainAndProjectUserPolicyTests(object): policy = PROVIDERS.policy_api.create_policy(policy['id'], policy) with self.test_client() as c: - c.get('/v3/policies', headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/policies', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_policy(self): policy = unit.new_policy_ref() policy = PROVIDERS.policy_api.create_policy(policy['id'], policy) with self.test_client() as c: - c.get('/v3/policies/%s' % policy['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/policies/%s' % policy['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_create_policy(self): create = { @@ -118,13 +131,19 @@ class _DomainAndProjectUserPolicyTests(object): 
'description': uuid.uuid4().hex, 'enabled': True, # Store serialized JSON data as the blob to mimic real world usage. - 'blob': json.dumps({'data': uuid.uuid4().hex, }), + 'blob': json.dumps( + { + 'data': uuid.uuid4().hex, + } + ), 'type': uuid.uuid4().hex, } with self.test_client() as c: c.post( - '/v3/policies', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/policies', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_policy(self): @@ -135,9 +154,10 @@ class _DomainAndProjectUserPolicyTests(object): with self.test_client() as c: c.patch( - '/v3/policies/%s' % policy['id'], json=update, + '/v3/policies/%s' % policy['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_policy(self): @@ -146,15 +166,18 @@ class _DomainAndProjectUserPolicyTests(object): with self.test_client() as c: c.delete( - '/v3/policies/%s' % policy['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/policies/%s' % policy['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserPoliciesTests, - _SystemReaderAndMemberPoliciesTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserPoliciesTests, + _SystemReaderAndMemberPoliciesTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -165,16 +188,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, 
self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -185,10 +207,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserPoliciesTests, - _SystemReaderAndMemberPoliciesTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserPoliciesTests, + _SystemReaderAndMemberPoliciesTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -199,16 +223,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -219,9 +242,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserPoliciesTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserPoliciesTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -235,7 +260,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = 
self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -254,14 +279,16 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 'enabled': True, # Store serialized JSON data as the blob to mimic real world # usage. - 'blob': json.dumps({'data': uuid.uuid4().hex, }), - 'type': uuid.uuid4().hex + 'blob': json.dumps( + { + 'data': uuid.uuid4().hex, + } + ), + 'type': uuid.uuid4().hex, } } with self.test_client() as c: - c.post( - '/v3/policies', json=create, headers=self.headers - ) + c.post('/v3/policies', json=create, headers=self.headers) def test_user_can_update_policy(self): policy = unit.new_policy_ref() @@ -271,8 +298,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/policies/%s' % policy['id'], json=update, - headers=self.headers + '/v3/policies/%s' % policy['id'], + json=update, + headers=self.headers, ) def test_user_can_delete_policy(self): @@ -280,14 +308,14 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, policy = PROVIDERS.policy_api.create_policy(policy['id'], policy) with self.test_client() as c: - c.delete( - '/v3/policies/%s' % policy['id'], headers=self.headers - ) + c.delete('/v3/policies/%s' % policy['id'], headers=self.headers) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserPolicyTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserPolicyTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -302,14 +330,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, 
user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -320,9 +349,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserPolicyTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserPolicyTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -334,7 +365,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -346,9 +377,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserPolicyTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserPolicyTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -370,14 +402,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a 
token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_policy_association.py b/keystone/tests/protection/v3/test_policy_association.py index 773a8e63b0..a1ab966bd3 100644 --- a/keystone/tests/protection/v3/test_policy_association.py +++ b/keystone/tests/protection/v3/test_policy_association.py @@ -41,13 +41,16 @@ class _SystemUserPoliciesAssociationTests(object): ) PROVIDERS.endpoint_policy_api.create_policy_association( - policy['id'], endpoint['id']) + policy['id'], endpoint['id'] + ) with self.test_client() as c: - c.get('/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' - % (policy['id'], endpoint['id']), - headers=self.headers, - expected_status_code=http.client.NO_CONTENT) + c.get( + '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' + % (policy['id'], endpoint['id']), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) def test_user_can_check_policy_association_for_service(self): policy = unit.new_policy_ref() @@ -58,13 +61,16 @@ class _SystemUserPoliciesAssociationTests(object): ) PROVIDERS.endpoint_policy_api.create_policy_association( - policy['id'], service_id=service['id']) + policy['id'], service_id=service['id'] + ) with self.test_client() as c: - c.get('/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' - % (policy['id'], service['id']), - headers=self.headers, - expected_status_code=http.client.NO_CONTENT) + c.get( + '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' + % (policy['id'], service['id']), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) def test_user_can_check_policy_association_for_region_and_service(self): policy = unit.new_policy_ref() @@ -81,10 +87,12 @@ class _SystemUserPoliciesAssociationTests(object): ) with self.test_client() as c: - c.get('/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' - % (policy['id'], service['id'], region['id']), - headers=self.headers, - expected_status_code=http.client.NO_CONTENT) + c.get( + 
'/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' + % (policy['id'], service['id'], region['id']), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) def test_user_can_get_policy_for_endpoint(self): policy = unit.new_policy_ref() @@ -100,9 +108,11 @@ class _SystemUserPoliciesAssociationTests(object): policy['id'], endpoint['id'] ) with self.test_client() as c: - c.get('/v3/endpoints/%s/OS-ENDPOINT-POLICY/policy' - % (endpoint['id']), - headers=self.headers) + c.get( + '/v3/endpoints/%s/OS-ENDPOINT-POLICY/policy' + % (endpoint['id']), + headers=self.headers, + ) def test_user_list_endpoints_for_policy(self): policy = unit.new_policy_ref() @@ -118,9 +128,11 @@ class _SystemUserPoliciesAssociationTests(object): policy['id'], endpoint['id'] ) with self.test_client() as c: - r = c.get('/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints' - % (policy['id']), - headers=self.headers) + r = c.get( + '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints' + % (policy['id']), + headers=self.headers, + ) for endpoint_itr in r.json['endpoints']: self.assertIn(endpoint['id'], endpoint_itr['id']) @@ -143,7 +155,7 @@ class _SystemReaderAndMemberPoliciesAssociationTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' % (policy['id'], endpoint['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_policy_association_for_endpoint(self): @@ -162,7 +174,7 @@ class _SystemReaderAndMemberPoliciesAssociationTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' % (policy['id'], endpoint['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_policy_association_for_service(self): @@ -176,7 +188,7 @@ class _SystemReaderAndMemberPoliciesAssociationTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' % (policy['id'], service['id']), headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_policy_association_for_service(self): @@ -191,7 +203,7 @@ class _SystemReaderAndMemberPoliciesAssociationTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' % (policy['id'], service['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_policy_assoc_for_region_and_service(self): @@ -207,7 +219,7 @@ class _SystemReaderAndMemberPoliciesAssociationTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' % (policy['id'], service['id'], region['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_policy_assoc_for_region_and_service(self): @@ -223,7 +235,7 @@ class _SystemReaderAndMemberPoliciesAssociationTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' % (policy['id'], service['id'], region['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -242,13 +254,16 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): ) PROVIDERS.endpoint_policy_api.create_policy_association( - policy['id'], endpoint['id']) + policy['id'], endpoint['id'] + ) with self.test_client() as c: - c.get('/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' - % (policy['id'], endpoint['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' + % (policy['id'], endpoint['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_check_policy_association_for_service(self): policy = unit.new_policy_ref() @@ -259,13 +274,16 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): ) PROVIDERS.endpoint_policy_api.create_policy_association( - 
policy['id'], service_id=service['id']) + policy['id'], service_id=service['id'] + ) with self.test_client() as c: - c.get('/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' - % (policy['id'], service['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' + % (policy['id'], service['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_check_policy_association_for_region_and_service(self): policy = unit.new_policy_ref() @@ -282,10 +300,12 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): ) with self.test_client() as c: - c.get('/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' - % (policy['id'], service['id'], region['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' + % (policy['id'], service['id'], region['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_get_policy_for_endpoint(self): policy = unit.new_policy_ref() @@ -301,10 +321,12 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): policy['id'], endpoint['id'] ) with self.test_client() as c: - c.get('/v3/endpoints/%s/OS-ENDPOINT-POLICY/policy' - % (endpoint['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/endpoints/%s/OS-ENDPOINT-POLICY/policy' + % (endpoint['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_list_endpoints_for_policy(self): policy = unit.new_policy_ref() @@ -320,10 +342,12 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): policy['id'], endpoint['id'] ) with self.test_client() as c: - c.get('/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints' - % (policy['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN - ) + c.get( + 
'/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints' + % (policy['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_create_policy_association_for_endpoint(self): policy = unit.new_policy_ref() @@ -341,7 +365,7 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' % (policy['id'], endpoint['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_policy_association_for_endpoint(self): @@ -360,7 +384,7 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' % (policy['id'], endpoint['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_policy_association_for_service(self): @@ -374,7 +398,7 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' % (policy['id'], service['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_policy_association_for_service(self): @@ -389,7 +413,7 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' % (policy['id'], service['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_policy_assoc_for_region_and_service(self): @@ -405,7 +429,7 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' % (policy['id'], service['id'], region['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_policy_assoc_for_region_and_service(self): @@ 
-421,14 +445,16 @@ class _DomainAndProjectUserPolicyAssociationsTests(object): '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' % (policy['id'], service['id'], region['id']), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserPoliciesAssociationTests, - _SystemReaderAndMemberPoliciesAssociationTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserPoliciesAssociationTests, + _SystemReaderAndMemberPoliciesAssociationTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -439,16 +465,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -459,10 +484,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserPoliciesAssociationTests, - _SystemReaderAndMemberPoliciesAssociationTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserPoliciesAssociationTests, + _SystemReaderAndMemberPoliciesAssociationTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -473,16 +500,15 @@ class 
SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -493,9 +519,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserPoliciesAssociationTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserPoliciesAssociationTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -509,7 +537,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -535,7 +563,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' % (policy['id'], endpoint['id']), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_user_can_delete_policy_association_for_endpoint(self): @@ -554,7 +582,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, '/v3/policies/%s/OS-ENDPOINT-POLICY/endpoints/%s' % (policy['id'], endpoint['id']), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + 
expected_status_code=http.client.NO_CONTENT, ) def test_user_can_create_policy_association_for_service(self): @@ -568,7 +596,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' % (policy['id'], service['id']), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_user_can_delete_policy_association_for_service(self): @@ -583,7 +611,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s' % (policy['id'], service['id']), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_user_can_create_policy_association_for_region_and_service(self): @@ -599,7 +627,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' % (policy['id'], service['id'], region['id']), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_user_can_delete_policy_association_for_region_and_service(self): @@ -615,13 +643,15 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, '/v3/policies/%s/OS-ENDPOINT-POLICY/services/%s/regions/%s' % (policy['id'], service['id'], region['id']), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserPolicyAssociationsTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserPolicyAssociationsTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -636,14 +666,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = 
PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -654,9 +685,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserPolicyAssociationsTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserPolicyAssociationsTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -668,7 +701,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -680,9 +713,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserPolicyAssociationsTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserPolicyAssociationsTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -704,14 +738,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + 
project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_project_endpoint.py b/keystone/tests/protection/v3/test_project_endpoint.py index 1a50d9aba4..f8f770be72 100644 --- a/keystone/tests/protection/v3/test_project_endpoint.py +++ b/keystone/tests/protection/v3/test_project_endpoint.py @@ -34,7 +34,7 @@ class _SystemUserProjectEndpointTests(object): def test_user_can_list_projects_for_endpoint(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -46,18 +46,20 @@ class _SystemUserProjectEndpointTests(object): ) PROVIDERS.catalog_api.add_endpoint_to_project( - endpoint['id'], project['id']) + endpoint['id'], project['id'] + ) with self.test_client() as c: - r = c.get('/v3/OS-EP-FILTER/endpoints/%s/projects' - % endpoint['id'], - headers=self.headers) + r = c.get( + '/v3/OS-EP-FILTER/endpoints/%s/projects' % endpoint['id'], + headers=self.headers, + ) for project_itr in r.json['projects']: self.assertIn(project['id'], project_itr['id']) def test_user_can_check_endpoint_in_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -69,17 +71,20 @@ class _SystemUserProjectEndpointTests(object): ) PROVIDERS.catalog_api.add_endpoint_to_project( - endpoint['id'], project['id']) + endpoint['id'], project['id'] + ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/projects/%s/endpoints/%s' - % (project['id'], 
endpoint['id']), - headers=self.headers, - expected_status_code=http.client.NO_CONTENT) + c.get( + '/v3/OS-EP-FILTER/projects/%s/endpoints/%s' + % (project['id'], endpoint['id']), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) def test_user_can_list_endpoints_for_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -91,10 +96,13 @@ class _SystemUserProjectEndpointTests(object): ) PROVIDERS.catalog_api.add_endpoint_to_project( - endpoint['id'], project['id']) + endpoint['id'], project['id'] + ) with self.test_client() as c: - r = c.get('/v3/OS-EP-FILTER/projects/%s/endpoints' % project['id'], - headers=self.headers) + r = c.get( + '/v3/OS-EP-FILTER/projects/%s/endpoints' % project['id'], + headers=self.headers, + ) for endpoint_itr in r.json['endpoints']: self.assertIn(endpoint['id'], endpoint_itr['id']) @@ -104,7 +112,7 @@ class _SystemReaderAndMemberProjectEndpointTests(object): def test_user_cannot_add_endpoint_to_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -115,15 +123,17 @@ class _SystemReaderAndMemberProjectEndpointTests(object): endpoint['id'], endpoint ) with self.test_client() as c: - c.put('/v3/OS-EP-FILTER/projects/%s/endpoints/%s' - % (project['id'], endpoint['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.put( + '/v3/OS-EP-FILTER/projects/%s/endpoints/%s' + % (project['id'], endpoint['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_remove_endpoint_from_project(self): project = 
PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -134,10 +144,12 @@ class _SystemReaderAndMemberProjectEndpointTests(object): endpoint['id'], endpoint ) with self.test_client() as c: - c.delete('/v3/OS-EP-FILTER/projects/%s/endpoints/%s' - % (project['id'], endpoint['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.delete( + '/v3/OS-EP-FILTER/projects/%s/endpoints/%s' + % (project['id'], endpoint['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) class _DomainAndProjectUserProjectEndpointTests(object): @@ -145,7 +157,7 @@ class _DomainAndProjectUserProjectEndpointTests(object): def test_user_cannot_list_projects_for_endpoint(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -157,16 +169,19 @@ class _DomainAndProjectUserProjectEndpointTests(object): ) PROVIDERS.catalog_api.add_endpoint_to_project( - endpoint['id'], project['id']) + endpoint['id'], project['id'] + ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/endpoints/%s/projects' % endpoint['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/endpoints/%s/projects' % endpoint['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_check_endpoint_in_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -178,17 +193,20 @@ class 
_DomainAndProjectUserProjectEndpointTests(object): ) PROVIDERS.catalog_api.add_endpoint_to_project( - endpoint['id'], project['id']) + endpoint['id'], project['id'] + ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/projects/%s/endpoints/%s' - % (project['id'], endpoint['id']), - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/projects/%s/endpoints/%s' + % (project['id'], endpoint['id']), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) def test_user_cannot_list_endpoints_for_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -200,17 +218,22 @@ class _DomainAndProjectUserProjectEndpointTests(object): ) PROVIDERS.catalog_api.add_endpoint_to_project( - endpoint['id'], project['id']) + endpoint['id'], project['id'] + ) with self.test_client() as c: - c.get('/v3/OS-EP-FILTER/projects/%s/endpoints' % project['id'], - headers=self.headers, - expected_status_code=http.client.FORBIDDEN) + c.get( + '/v3/OS-EP-FILTER/projects/%s/endpoints' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, + ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserProjectEndpointTests, - _SystemReaderAndMemberProjectEndpointTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserProjectEndpointTests, + _SystemReaderAndMemberProjectEndpointTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -221,16 +244,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = 
PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -241,10 +263,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserProjectEndpointTests, - _SystemReaderAndMemberProjectEndpointTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserProjectEndpointTests, + _SystemReaderAndMemberProjectEndpointTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -255,16 +279,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -275,9 +298,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserProjectEndpointTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + 
_SystemUserProjectEndpointTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -291,7 +316,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -304,7 +329,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_can_add_endpoint_to_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -315,15 +340,17 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, endpoint['id'], endpoint ) with self.test_client() as c: - c.put('/v3/OS-EP-FILTER/projects/%s/endpoints/%s' - % (project['id'], endpoint['id']), - headers=self.headers, - expected_status_code=http.client.NO_CONTENT) + c.put( + '/v3/OS-EP-FILTER/projects/%s/endpoints/%s' + % (project['id'], endpoint['id']), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) def test_user_can_remove_endpoint_from_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) service = PROVIDERS.catalog_api.create_service( @@ -334,18 +361,23 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, endpoint['id'], endpoint ) PROVIDERS.catalog_api.add_endpoint_to_project( - endpoint['id'], project['id']) + endpoint['id'], project['id'] + ) with self.test_client() as c: - c.delete('/v3/OS-EP-FILTER/projects/%s/endpoints/%s' - % (project['id'], endpoint['id']), - headers=self.headers, - expected_status_code=http.client.NO_CONTENT) + c.delete( + '/v3/OS-EP-FILTER/projects/%s/endpoints/%s' + 
% (project['id'], endpoint['id']), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, + ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserProjectEndpointTests, - _SystemReaderAndMemberProjectEndpointTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserProjectEndpointTests, + _SystemReaderAndMemberProjectEndpointTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -360,14 +392,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -378,10 +411,12 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserProjectEndpointTests, - _SystemReaderAndMemberProjectEndpointTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserProjectEndpointTests, + _SystemReaderAndMemberProjectEndpointTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -393,7 +428,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # 
Grab a token using the persona we're testing and prepare headers @@ -405,10 +440,11 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserProjectEndpointTests, - _SystemReaderAndMemberProjectEndpointTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserProjectEndpointTests, + _SystemReaderAndMemberProjectEndpointTests, +): def _override_policy(self): # TODO(cmurphy): Remove this once the deprecated policies in @@ -425,7 +461,7 @@ class ProjectUserTestsWithoutEnforceScope( 'identity:add_endpoint_to_project': bp.SYSTEM_ADMIN, 'identity:check_endpoint_in_project': bp.SYSTEM_READER, 'identity:list_endpoints_for_project': bp.SYSTEM_READER, - 'identity:remove_endpoint_from_project': bp.SYSTEM_ADMIN + 'identity:remove_endpoint_from_project': bp.SYSTEM_ADMIN, } f.write(jsonutils.dumps(overridden_policies)) @@ -455,14 +491,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_project_tags.py b/keystone/tests/protection/v3/test_project_tags.py index e62561c240..2b2df64bfb 100644 --- a/keystone/tests/protection/v3/test_project_tags.py +++ b/keystone/tests/protection/v3/test_project_tags.py @@ -56,7 +56,7 @@ def _override_policy(policy_file): ), 'identity:delete_project_tags': ( pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN_OR_PROJECT_ADMIN - ) + ), } f.write(jsonutils.dumps(overridden_policies)) @@ -65,7 +65,7 @@ class 
_SystemUserTests(object): def test_user_can_get_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -75,13 +75,13 @@ class _SystemUserTests(object): c.get( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_user_can_list_project_tags(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -100,7 +100,7 @@ class _SystemMemberAndReaderTagTests(object): def test_user_cannot_create_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -109,13 +109,13 @@ class _SystemMemberAndReaderTagTests(object): c.put( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -125,14 +125,16 @@ class _SystemMemberAndReaderTagTests(object): with self.test_client() as c: c.put( - '/v3/projects/%s/tags' % project['id'], headers=self.headers, - json=update, expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s/tags' % project['id'], + headers=self.headers, + json=update, + expected_status_code=http.client.FORBIDDEN, ) def 
test_user_cannot_delete_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -142,7 +144,7 @@ class _SystemMemberAndReaderTagTests(object): c.delete( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -151,7 +153,7 @@ class _DomainAndProjectUserTagTests(object): def test_user_cannot_create_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -160,13 +162,13 @@ class _DomainAndProjectUserTagTests(object): c.put( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -176,14 +178,16 @@ class _DomainAndProjectUserTagTests(object): with self.test_client() as c: c.put( - '/v3/projects/%s/tags' % project['id'], headers=self.headers, - json=update, expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s/tags' % project['id'], + headers=self.headers, + json=update, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -193,14 
+197,16 @@ class _DomainAndProjectUserTagTests(object): c.delete( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTests, - _SystemMemberAndReaderTagTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, + _SystemMemberAndReaderTagTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -211,16 +217,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -231,10 +236,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTests, - _SystemMemberAndReaderTagTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, + _SystemMemberAndReaderTagTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -245,16 +252,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = 
PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -265,9 +271,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -279,7 +287,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -292,7 +300,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_can_create_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -301,13 +309,13 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, c.put( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.CREATED + expected_status_code=http.client.CREATED, ) def test_user_can_update_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -317,15 +325,16 
@@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/projects/%s/tags' % project['id'], headers=self.headers, + '/v3/projects/%s/tags' % project['id'], + headers=self.headers, json=update, - expected_status_code=http.client.OK + expected_status_code=http.client.OK, ) def test_user_can_delete_project_tag(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -334,7 +343,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( '/v3/projects/%s/tags/%s' % (project['id'], tag), - headers=self.headers + headers=self.headers, ) @@ -352,7 +361,7 @@ class _DomainUserTagTests(object): c.get( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_user_can_list_tags_for_project_in_domain(self): @@ -373,7 +382,7 @@ class _DomainUserTagTests(object): def test_user_cannot_create_project_tag_outside_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -382,13 +391,13 @@ class _DomainUserTagTests(object): c.put( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_project_tag_outside_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -399,14 +408,16 @@ class 
_DomainUserTagTests(object): with self.test_client() as c: c.put( - '/v3/projects/%s/tags' % project['id'], headers=self.headers, - json=update, expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s/tags' % project['id'], + headers=self.headers, + json=update, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_project_tag_outside_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -416,13 +427,13 @@ class _DomainUserTagTests(object): c.delete( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_tag_for_project_outside_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -432,13 +443,13 @@ class _DomainUserTagTests(object): c.get( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_tags_for_project_outside_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -448,7 +459,7 @@ class _DomainUserTagTests(object): c.get( '/v3/projects/%s/tags' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -465,7 +476,7 @@ class _DomainMemberAndReaderTagTests(object): c.put( '/v3/projects/%s/tags/%s' % (project['id'], tag), 
headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_project_tag_in_domain(self): @@ -481,8 +492,10 @@ class _DomainMemberAndReaderTagTests(object): with self.test_client() as c: c.put( - '/v3/projects/%s/tags' % project['id'], headers=self.headers, - json=update, expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s/tags' % project['id'], + headers=self.headers, + json=update, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_project_tag_in_domain(self): @@ -497,13 +510,15 @@ class _DomainMemberAndReaderTagTests(object): c.delete( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class DomainAdminUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUserTagTests): +class DomainAdminUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserTagTests, +): def setUp(self): super(DomainAdminUserTests, self).setUp() @@ -527,12 +542,14 @@ class DomainAdminUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], + user_id=self.user_id, + password=domain_admin['password'], domain_id=self.domain_id, ) @@ -553,7 +570,8 @@ class DomainAdminUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( '/v3/projects/%s/tags/%s' % (project['id'], tag), - headers=self.headers, expected_status_code=http.client.CREATED + 
headers=self.headers, + expected_status_code=http.client.CREATED, ) def test_user_can_update_project_tag_in_domain(self): @@ -569,8 +587,10 @@ class DomainAdminUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: r = c.put( - '/v3/projects/%s/tags' % project['id'], headers=self.headers, - json=update, expected_status_code=http.client.OK + '/v3/projects/%s/tags' % project['id'], + headers=self.headers, + json=update, + expected_status_code=http.client.OK, ) self.assertTrue(len(r.json['tags']) == 1) self.assertEqual(new_tag, r.json['tags'][0]) @@ -586,14 +606,16 @@ class DomainAdminUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( '/v3/projects/%s/tags/%s' % (project['id'], tag), - headers=self.headers + headers=self.headers, ) -class DomainMemberUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUserTagTests, - _DomainMemberAndReaderTagTests): +class DomainMemberUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserTagTests, + _DomainMemberAndReaderTagTests, +): def setUp(self): super(DomainMemberUserTests, self).setUp() @@ -617,12 +639,14 @@ class DomainMemberUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], + user_id=self.user_id, + password=domain_admin['password'], domain_id=self.domain_id, ) @@ -634,10 +658,12 @@ class DomainMemberUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainReaderUserTests(base_classes.TestCaseWithBootstrap, - 
common_auth.AuthTestMixin, - _DomainUserTagTests, - _DomainMemberAndReaderTagTests): +class DomainReaderUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUserTagTests, + _DomainMemberAndReaderTagTests, +): def setUp(self): super(DomainReaderUserTests, self).setUp() @@ -661,12 +687,14 @@ class DomainReaderUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], + user_id=self.user_id, + password=domain_admin['password'], domain_id=self.domain_id, ) @@ -688,7 +716,7 @@ class _ProjectUserTagTests(object): c.get( '/v3/projects/%s/tags/%s' % (self.project_id, tag), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_user_can_list_tags_for_project(self): @@ -705,7 +733,7 @@ class _ProjectUserTagTests(object): def test_user_cannot_create_tag_for_other_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -714,13 +742,13 @@ class _ProjectUserTagTests(object): c.put( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_tag_for_other_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + 
unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -730,14 +758,16 @@ class _ProjectUserTagTests(object): with self.test_client() as c: c.put( - '/v3/projects/%s/tags' % project['id'], headers=self.headers, - json=update, expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s/tags' % project['id'], + headers=self.headers, + json=update, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_tag_for_other_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -747,13 +777,13 @@ class _ProjectUserTagTests(object): c.delete( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_tag_for_other_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -763,13 +793,13 @@ class _ProjectUserTagTests(object): c.get( '/v3/projects/%s/tags/%s' % (project['id'], tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_tags_for_other_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) tag = uuid.uuid4().hex @@ -779,7 +809,7 @@ class _ProjectUserTagTests(object): c.get( '/v3/projects/%s/tags' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -791,7 +821,7 @@ class 
_ProjectMemberAndReaderTagTests(object): c.put( '/v3/projects/%s/tags/%s' % (self.project_id, tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_project_tag(self): @@ -802,8 +832,10 @@ class _ProjectMemberAndReaderTagTests(object): with self.test_client() as c: c.put( - '/v3/projects/%s/tags' % self.project_id, headers=self.headers, - json=update, expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s/tags' % self.project_id, + headers=self.headers, + json=update, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_project_tag(self): @@ -814,13 +846,15 @@ class _ProjectMemberAndReaderTagTests(object): c.delete( '/v3/projects/%s/tags/%s' % (self.project_id, tag), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class ProjectAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ProjectUserTagTests): +class ProjectAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ProjectUserTagTests, +): def setUp(self): super(ProjectAdminTests, self).setUp() @@ -838,14 +872,16 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, self.user_id = self.bootstrapper.admin_user_id PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - project_id=self.bootstrapper.project_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + project_id=self.bootstrapper.project_id, ) self.project_id = self.bootstrapper.project_id auth = self.build_authentication_request( - user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + user_id=self.user_id, + password=self.bootstrapper.admin_password, + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -860,7 +896,8 @@ class 
ProjectAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( '/v3/projects/%s/tags/%s' % (self.project_id, tag), - headers=self.headers, expected_status_code=http.client.CREATED + headers=self.headers, + expected_status_code=http.client.CREATED, ) def test_user_can_update_project_tag(self): @@ -871,8 +908,10 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/projects/%s/tags' % self.project_id, headers=self.headers, - json=update, expected_status_code=http.client.OK + '/v3/projects/%s/tags' % self.project_id, + headers=self.headers, + json=update, + expected_status_code=http.client.OK, ) def test_user_can_delete_project_tag(self): @@ -882,14 +921,16 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( '/v3/projects/%s/tags/%s' % (self.project_id, tag), - headers=self.headers + headers=self.headers, ) -class ProjectMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ProjectUserTagTests, - _ProjectMemberAndReaderTagTests): +class ProjectMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ProjectUserTagTests, + _ProjectMemberAndReaderTagTests, +): def setUp(self): super(ProjectMemberTests, self).setUp() @@ -907,19 +948,21 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) self.project_id = project['id'] self.user_id = self.bootstrapper.admin_user_id PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( - user_id=self.user_id, 
password=self.bootstrapper.admin_password, - project_id=self.project_id + user_id=self.user_id, + password=self.bootstrapper.admin_password, + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -930,10 +973,12 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _ProjectUserTagTests, - _ProjectMemberAndReaderTagTests): +class ProjectReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _ProjectUserTagTests, + _ProjectMemberAndReaderTagTests, +): def setUp(self): super(ProjectReaderTests, self).setUp() @@ -951,19 +996,21 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) self.project_id = project['id'] self.user_id = self.bootstrapper.admin_user_id PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.project_id + user_id=self.user_id, + password=self.bootstrapper.admin_password, + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_projects.py b/keystone/tests/protection/v3/test_projects.py index 70ea051fcd..909e6df95d 100644 --- a/keystone/tests/protection/v3/test_projects.py +++ b/keystone/tests/protection/v3/test_projects.py @@ -34,7 +34,7 @@ class _SystemUserTests(object): def test_user_can_list_projects(self): PROVIDERS.resource_api.create_project( 
uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: @@ -44,24 +44,25 @@ class _SystemUserTests(object): def test_user_can_list_projects_for_other_users(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) user = PROVIDERS.identity_api.create_user( unit.new_user_ref( - CONF.identity.default_domain_id, - id=uuid.uuid4().hex + CONF.identity.default_domain_id, id=uuid.uuid4().hex ) ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: r = c.get( - '/v3/users/%s/projects' % user['id'], headers=self.headers, + '/v3/users/%s/projects' % user['id'], + headers=self.headers, ) self.assertEqual(1, len(r.json['projects'])) self.assertEqual(project['id'], r.json['projects'][0]['id']) @@ -69,7 +70,7 @@ class _SystemUserTests(object): def test_user_can_get_a_project(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: @@ -79,8 +80,9 @@ class _SystemUserTests(object): def test_user_cannot_get_non_existent_project_not_found(self): with self.test_client() as c: c.get( - '/v3/projects/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + '/v3/projects/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, ) @@ -96,23 +98,26 @@ class _SystemMemberAndReaderProjectTests(object): with self.test_client() as c: c.post( - '/v3/projects', json=create, 
headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_projects(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) update = {'project': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/projects/%s' % project['id'], json=update, + '/v3/projects/%s' % project['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existent_project_forbidden(self): @@ -120,28 +125,31 @@ class _SystemMemberAndReaderProjectTests(object): with self.test_client() as c: c.patch( - '/v3/projects/%s' % uuid.uuid4().hex, json=update, + '/v3/projects/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_projects(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.delete( - '/v3/projects/%s' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_project_forbidden(self): with self.test_client() as c: c.delete( - '/v3/projects/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -150,8 +158,7 @@ class 
_DomainUsersTests(object): def test_user_can_list_projects_within_domain(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) with self.test_client() as c: @@ -162,7 +169,7 @@ class _DomainUsersTests(object): def test_user_cannot_list_projects_in_other_domain(self): PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: @@ -171,8 +178,7 @@ class _DomainUsersTests(object): def test_user_can_get_a_project_within_domain(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) with self.test_client() as c: @@ -182,31 +188,29 @@ class _DomainUsersTests(object): def test_user_cannot_get_a_project_in_other_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.get( - '/v3/projects/%s' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_list_projects_for_user_in_domain(self): user = PROVIDERS.identity_api.create_user( - unit.new_user_ref( - self.domain_id, - id=uuid.uuid4().hex - ) + unit.new_user_ref(self.domain_id, id=uuid.uuid4().hex) ) project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) PROVIDERS.assignment_api.create_grant( - 
self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: @@ -219,25 +223,26 @@ class _DomainUsersTests(object): def test_user_cannot_list_projects_for_user_in_other_domain(self): user = PROVIDERS.identity_api.create_user( unit.new_user_ref( - CONF.identity.default_domain_id, - id=uuid.uuid4().hex + CONF.identity.default_domain_id, id=uuid.uuid4().hex ) ) project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/users/%s/projects' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s/projects' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -249,8 +254,10 @@ class _DomainMemberAndReaderProjectTests(object): with self.test_client() as c: c.post( - '/v3/projects', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_projects_in_other_domains(self): @@ -262,38 +269,41 @@ class _DomainMemberAndReaderProjectTests(object): with self.test_client() as c: c.post( - '/v3/projects', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_projects_within_domain(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - 
unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) update = {'project': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/projects/%s' % project['id'], json=update, + '/v3/projects/%s' % project['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_projects_in_other_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) update = {'project': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/projects/%s' % project['id'], json=update, + '/v3/projects/%s' % project['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existent_project_forbidden(self): @@ -301,47 +311,52 @@ class _DomainMemberAndReaderProjectTests(object): with self.test_client() as c: c.patch( - '/v3/projects/%s' % uuid.uuid4().hex, json=update, + '/v3/projects/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_projects_within_domain(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) with self.test_client() as c: c.delete( - '/v3/projects/%s' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_projects_in_other_domain(self): project = 
PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.delete( - '/v3/projects/%s' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_projects_forbidden(self): with self.test_client() as c: c.delete( - '/v3/projects/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTests, - _SystemMemberAndReaderProjectTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, + _SystemMemberAndReaderProjectTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -352,16 +367,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -372,10 +386,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, 
- common_auth.AuthTestMixin, - _SystemUserTests, - _SystemMemberAndReaderProjectTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, + _SystemMemberAndReaderProjectTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -386,16 +402,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -406,9 +421,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -420,7 +437,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -443,15 +460,16 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_can_update_projects(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) 
update = {'project': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/projects/%s' % project['id'], json=update, - headers=self.headers + '/v3/projects/%s' % project['id'], + json=update, + headers=self.headers, ) def test_user_can_update_non_existent_project_not_found(self): @@ -459,15 +477,16 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/projects/%s' % uuid.uuid4().hex, json=update, + '/v3/projects/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) def test_user_can_delete_projects(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: @@ -476,29 +495,32 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_can_delete_non_existent_project_not_found(self): with self.test_client() as c: c.delete( - '/v3/projects/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + '/v3/projects/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, ) def test_user_can_list_their_projects(self): other_project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) user_project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=user_project['id'] + self.bootstrapper.reader_role_id, + user_id=self.user_id, + 
project_id=user_project['id'], ) with self.test_client() as c: r = c.get( - '/v3/users/%s/projects' % self.user_id, headers=self.headers, + '/v3/users/%s/projects' % self.user_id, + headers=self.headers, ) self.assertEqual(2, len(r.json['projects'])) project_ids = [] @@ -510,10 +532,12 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, self.assertNotIn(other_project['id'], project_ids) -class DomainReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUsersTests, - _DomainMemberAndReaderProjectTests): +class DomainReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUsersTests, + _DomainMemberAndReaderProjectTests, +): def setUp(self): super(DomainReaderTests, self).setUp() @@ -528,13 +552,15 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, domain_user = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.user_id, + password=domain_user['password'], + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -545,10 +571,12 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUsersTests, - _DomainMemberAndReaderProjectTests): +class DomainMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUsersTests, + _DomainMemberAndReaderProjectTests, +): def setUp(self): super(DomainMemberTests, self).setUp() @@ -563,13 +591,15 @@ class 
DomainMemberTests(base_classes.TestCaseWithBootstrap, domain_user = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.user_id, + password=domain_user['password'], + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -580,9 +610,11 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainUsersTests): +class DomainAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainUsersTests, +): def setUp(self): super(DomainAdminTests, self).setUp() @@ -606,12 +638,14 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], + user_id=self.user_id, + password=domain_admin['password'], domain_id=self.domain_id, ) @@ -634,14 +668,15 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, with open(self.policy_file_name, 'w') as f: overridden_policies = { 'identity:get_project': ( - pp.SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER), + pp.SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER + ), 
'identity:list_user_projects': ( - pp.SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER), - 'identity:list_projects': ( - pp.SYSTEM_READER_OR_DOMAIN_READER), + pp.SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER + ), + 'identity:list_projects': (pp.SYSTEM_READER_OR_DOMAIN_READER), 'identity:create_project': pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, 'identity:update_project': pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, - 'identity:delete_project': pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN + 'identity:delete_project': pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, } f.write(jsonutils.dumps(overridden_policies)) @@ -660,37 +695,40 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.post( - '/v3/projects', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_update_projects_within_domain(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) update = {'project': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/projects/%s' % project['id'], json=update, - headers=self.headers + '/v3/projects/%s' % project['id'], + json=update, + headers=self.headers, ) def test_user_cannot_update_projects_in_other_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) update = {'project': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/projects/%s' % project['id'], json=update, + '/v3/projects/%s' % project['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def 
test_user_cannot_update_non_existent_project_forbidden(self): @@ -704,15 +742,15 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/projects/%s' % uuid.uuid4().hex, json=update, + '/v3/projects/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_delete_projects_within_domain(self): project = PROVIDERS.resource_api.create_project( - uuid.uuid4().hex, - unit.new_project_ref(domain_id=self.domain_id) + uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) with self.test_client() as c: @@ -721,13 +759,14 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, def test_user_cannot_delete_projects_in_other_domain(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.delete( - '/v3/projects/%s' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_projects_forbidden(self): @@ -739,13 +778,15 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, # a 403 instead of a 404. 
with self.test_client() as c: c.delete( - '/v3/projects/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, common_auth.AuthTestMixin +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -763,14 +804,16 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, self.user_id = self.bootstrapper.admin_user_id PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=self.bootstrapper.project_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=self.bootstrapper.project_id, ) self.project_id = self.bootstrapper.project_id auth = self.build_authentication_request( - user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + user_id=self.user_id, + password=self.bootstrapper.admin_password, + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -792,14 +835,15 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with open(self.policy_file_name, 'w') as f: overridden_policies = { 'identity:get_project': ( - pp.SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER), + pp.SYSTEM_READER_OR_DOMAIN_READER_OR_PROJECT_USER + ), 'identity:list_user_projects': ( - pp.SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER), - 'identity:list_projects': ( - pp.SYSTEM_READER_OR_DOMAIN_READER), + pp.SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER + ), + 'identity:list_projects': (pp.SYSTEM_READER_OR_DOMAIN_READER), 'identity:create_project': pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, 'identity:update_project': pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, - 'identity:delete_project': pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN + 
'identity:delete_project': pp.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, } f.write(jsonutils.dumps(overridden_policies)) @@ -808,32 +852,34 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, # on the project created by ``keystone-manage bootstrap``. with self.test_client() as c: c.get( - '/v3/projects', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_projects_for_others(self): user = PROVIDERS.identity_api.create_user( unit.new_user_ref( - CONF.identity.default_domain_id, - id=uuid.uuid4().hex + CONF.identity.default_domain_id, id=uuid.uuid4().hex ) ) project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) with self.test_client() as c: c.get( - '/v3/users/%s/projects' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s/projects' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_list_their_projects(self): @@ -842,7 +888,8 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, # administrative, reserved for system and domain users. 
with self.test_client() as c: r = c.get( - '/v3/users/%s/projects' % self.user_id, headers=self.headers, + '/v3/users/%s/projects' % self.user_id, + headers=self.headers, ) self.assertEqual(1, len(r.json['projects'])) self.assertEqual(self.project_id, r.json['projects'][0]['id']) @@ -854,13 +901,14 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, def test_user_cannot_get_other_projects(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.get( - '/v3/projects/%s' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_projects(self): @@ -872,23 +920,26 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.post( - '/v3/projects', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_projects(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) update = {'project': {'description': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/projects/%s' % project['id'], json=update, + '/v3/projects/%s' % project['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existent_project_forbidden(self): @@ -896,26 +947,29 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/projects/%s' % 
uuid.uuid4().hex, json=update, + '/v3/projects/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_projects(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) with self.test_client() as c: c.delete( - '/v3/projects/%s' % project['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % project['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_project_forbidden(self): with self.test_client() as c: c.delete( - '/v3/projects/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/projects/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) diff --git a/keystone/tests/protection/v3/test_protocols.py b/keystone/tests/protection/v3/test_protocols.py index 50b9983287..d437b6a23f 100644 --- a/keystone/tests/protection/v3/test_protocols.py +++ b/keystone/tests/protection/v3/test_protocols.py @@ -51,8 +51,8 @@ class _SystemUserProtocolTests(object): with self.test_client() as c: path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols' % - identity_provider['id'] + '/v3/OS-FEDERATION/identity_providers/%s/protocols' + % identity_provider['id'] ) r = c.get(path, headers=self.headers) self.assertEqual(1, len(r.json['protocols'])) @@ -62,9 +62,9 @@ class _SystemUserProtocolTests(object): protocol, mapping, identity_provider = self._create_protocol_and_deps() with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol['id']) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + 
protocol['id'], ) c.get(path, headers=self.headers) @@ -85,13 +85,15 @@ class _SystemReaderAndMemberProtocolTests(object): create = {'protocol': {'mapping_id': mapping['id']}} with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol_id) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol_id, ) c.put( - path, json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_protocols(self): @@ -103,26 +105,29 @@ class _SystemReaderAndMemberProtocolTests(object): update = {'protocol': {'mapping_id': new_mapping['id']}} with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol['id']) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol['id'], ) c.patch( - path, json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_protocol(self): protocol, mapping, identity_provider = self._create_protocol_and_deps() with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol['id']) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol['id'], ) c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -142,13 +147,15 @@ class _DomainAndProjectUserProtocolTests(object): create = {'protocol': {'mapping_id': mapping['id']}} with self.test_client() as c: - path = ( - 
'/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol_id) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol_id, ) c.put( - path, json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_protocols(self): @@ -160,26 +167,29 @@ class _DomainAndProjectUserProtocolTests(object): update = {'protocol': {'mapping_id': new_mapping['id']}} with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol['id']) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol['id'], ) c.patch( - path, json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_protocol(self): protocol, mapping, identity_provider = self._create_protocol_and_deps() with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol['id']) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol['id'], ) c.delete( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_protocols(self): @@ -187,33 +197,37 @@ class _DomainAndProjectUserProtocolTests(object): with self.test_client() as c: path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols' % - identity_provider['id'] + '/v3/OS-FEDERATION/identity_providers/%s/protocols' + % identity_provider['id'] ) c.get( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, 
+ expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_a_protocol(self): protocol, mapping, identity_provider = self._create_protocol_and_deps() with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol['id']) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol['id'], ) c.get( - path, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + path, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUtilities, - _SystemUserProtocolTests, - _SystemReaderAndMemberProtocolTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUtilities, + _SystemUserProtocolTests, + _SystemReaderAndMemberProtocolTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -224,16 +238,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -244,11 +257,13 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUtilities, - _SystemUserProtocolTests, - _SystemReaderAndMemberProtocolTests): +class 
SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUtilities, + _SystemUserProtocolTests, + _SystemReaderAndMemberProtocolTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -259,16 +274,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -279,10 +293,12 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUtilities, - _SystemUserProtocolTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUtilities, + _SystemUserProtocolTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -296,7 +312,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -320,13 +336,15 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, create = {'protocol': {'mapping_id': mapping['id']}} with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol_id) + path = 
'/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol_id, ) c.put( - path, json=create, headers=self.headers, - expected_status_code=http.client.CREATED + path, + json=create, + headers=self.headers, + expected_status_code=http.client.CREATED, ) def test_user_can_update_protocols(self): @@ -338,9 +356,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, update = {'protocol': {'mapping_id': new_mapping['id']}} with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol['id']) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol['id'], ) c.patch(path, json=update, headers=self.headers) @@ -348,17 +366,19 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, protocol, mapping, identity_provider = self._create_protocol_and_deps() with self.test_client() as c: - path = ( - '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % - (identity_provider['id'], protocol['id']) + path = '/v3/OS-FEDERATION/identity_providers/%s/protocols/%s' % ( + identity_provider['id'], + protocol['id'], ) c.delete(path, headers=self.headers) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUtilities, - _DomainAndProjectUserProtocolTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUtilities, + _DomainAndProjectUserProtocolTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -373,14 +393,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) 
auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -391,10 +412,12 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUtilities, - _DomainAndProjectUserProtocolTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUtilities, + _DomainAndProjectUserProtocolTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -406,7 +429,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -418,10 +441,11 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUtilities, - _DomainAndProjectUserProtocolTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUtilities, + _DomainAndProjectUserProtocolTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -443,14 +467,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and 
prepare headers diff --git a/keystone/tests/protection/v3/test_regions.py b/keystone/tests/protection/v3/test_regions.py index b627cd8a04..d8a4b3ddb7 100644 --- a/keystone/tests/protection/v3/test_regions.py +++ b/keystone/tests/protection/v3/test_regions.py @@ -54,8 +54,10 @@ class _SystemReaderAndMemberUserRegionTests(object): with self.test_client() as c: c.post( - '/v3/regions', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/regions', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_regions(self): @@ -64,9 +66,10 @@ class _SystemReaderAndMemberUserRegionTests(object): with self.test_client() as c: update = {'region': {'description': uuid.uuid4().hex}} c.patch( - '/v3/regions/%s' % region['id'], json=update, + '/v3/regions/%s' % region['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_regions(self): @@ -76,7 +79,7 @@ class _SystemReaderAndMemberUserRegionTests(object): c.delete( '/v3/regions/%s' % region['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -88,8 +91,10 @@ class _DomainAndProjectUserRegionTests(object): with self.test_client() as c: c.post( - '/v3/regions', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/regions', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_regions(self): @@ -98,9 +103,10 @@ class _DomainAndProjectUserRegionTests(object): with self.test_client() as c: update = {'region': {'description': uuid.uuid4().hex}} c.patch( - '/v3/regions/%s' % region['id'], json=update, + '/v3/regions/%s' % region['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def 
test_user_cannot_delete_regions(self): @@ -110,14 +116,16 @@ class _DomainAndProjectUserRegionTests(object): c.delete( '/v3/regions/%s' % region['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegionTests, - _SystemReaderAndMemberUserRegionTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegionTests, + _SystemReaderAndMemberUserRegionTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -128,16 +136,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -148,10 +155,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegionTests, - _SystemReaderAndMemberUserRegionTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegionTests, + _SystemReaderAndMemberUserRegionTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -162,16 +171,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = 
PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -182,9 +190,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegionTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegionTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -198,7 +208,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -220,8 +230,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: update = {'region': {'description': uuid.uuid4().hex}} c.patch( - '/v3/regions/%s' % region['id'], json=update, - headers=self.headers + '/v3/regions/%s' % region['id'], + json=update, + headers=self.headers, ) def test_user_can_delete_regions(self): @@ -231,10 +242,12 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, c.delete('/v3/regions/%s' % region['id'], headers=self.headers) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegionTests, - _DomainAndProjectUserRegionTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegionTests, + _DomainAndProjectUserRegionTests, +): def 
setUp(self): super(DomainUserTests, self).setUp() @@ -249,14 +262,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -267,10 +281,12 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegionTests, - _DomainAndProjectUserRegionTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegionTests, + _DomainAndProjectUserRegionTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -284,7 +300,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -296,10 +312,11 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegionTests, - _DomainAndProjectUserRegionTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegionTests, + _DomainAndProjectUserRegionTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -321,14 +338,15 @@ class 
ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_registered_limits.py b/keystone/tests/protection/v3/test_registered_limits.py index f508eb853a..7f7bee7fcf 100644 --- a/keystone/tests/protection/v3/test_registered_limits.py +++ b/keystone/tests/protection/v3/test_registered_limits.py @@ -61,9 +61,7 @@ class _UserRegisteredLimitTests(object): limit_id = limits[0]['id'] with self.test_client() as c: - r = c.get( - '/v3/registered_limits', headers=self.headers - ) + r = c.get('/v3/registered_limits', headers=self.headers) self.assertTrue(len(r.json['registered_limits']) == 1) self.assertEqual(limit_id, r.json['registered_limits'][0]['id']) @@ -74,16 +72,16 @@ class _UserRegisteredLimitTests(object): create = { 'registered_limits': [ - unit.new_registered_limit_ref( - service_id=service['id'] - ) + unit.new_registered_limit_ref(service_id=service['id']) ] } with self.test_client() as c: c.post( - '/v3/registered_limits', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/registered_limits', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_registered_limits(self): @@ -100,14 +98,13 @@ class _UserRegisteredLimitTests(object): limit_id = limits[0]['id'] with self.test_client() as c: - update = { - 'registered_limit': {'default_limit': 5} - } + update = {'registered_limit': {'default_limit': 5}} c.patch( - '/v3/registered_limits/%s' % limit_id, json=update, + '/v3/registered_limits/%s' % limit_id, + 
json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_registered_limits(self): @@ -125,14 +122,17 @@ class _UserRegisteredLimitTests(object): with self.test_client() as c: c.delete( - '/v3/registered_limits/%s' % limit_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/registered_limits/%s' % limit_id, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegisteredLimitTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegisteredLimitTests, +): def setUp(self): super(SystemReaderTests, self).setUp() self.loadapp() @@ -142,16 +142,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -162,9 +161,11 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegisteredLimitTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegisteredLimitTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -175,16 +176,15 @@ class 
SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -195,8 +195,9 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, common_auth.AuthTestMixin +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -210,7 +211,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -253,9 +254,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, limit_id = limits[0]['id'] with self.test_client() as c: - r = c.get( - '/v3/registered_limits', headers=self.headers - ) + r = c.get('/v3/registered_limits', headers=self.headers) self.assertTrue(len(r.json['registered_limits']) == 1) self.assertEqual(limit_id, r.json['registered_limits'][0]['id']) @@ -266,9 +265,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, create = { 'registered_limits': [ - unit.new_registered_limit_ref( - service_id=service['id'] - ) + unit.new_registered_limit_ref(service_id=service['id']) ] } @@ -289,13 +286,12 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, 
limit_id = limits[0]['id'] with self.test_client() as c: - update = { - 'registered_limit': {'default_limit': 5} - } + update = {'registered_limit': {'default_limit': 5}} c.patch( - '/v3/registered_limits/%s' % limit_id, json=update, - headers=self.headers + '/v3/registered_limits/%s' % limit_id, + json=update, + headers=self.headers, ) def test_user_can_delete_registered_limits(self): @@ -317,9 +313,11 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegisteredLimitTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegisteredLimitTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -334,14 +332,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -352,9 +351,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegisteredLimitTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegisteredLimitTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -368,7 +369,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, 
password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -380,9 +381,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _UserRegisteredLimitTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _UserRegisteredLimitTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -404,14 +406,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_roles.py b/keystone/tests/protection/v3/test_roles.py index becfeee123..2b96290554 100644 --- a/keystone/tests/protection/v3/test_roles.py +++ b/keystone/tests/protection/v3/test_roles.py @@ -56,8 +56,10 @@ class _SystemReaderAndMemberRoleTests(object): with self.test_client() as c: c.post( - '/v3/roles', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_roles(self): @@ -69,8 +71,10 @@ class _SystemReaderAndMemberRoleTests(object): with self.test_client() as c: c.patch( - '/v3/roles/%s' % role['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + json=update, + headers=self.headers, + 
expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_roles(self): @@ -80,8 +84,9 @@ class _SystemReaderAndMemberRoleTests(object): with self.test_client() as c: c.delete( - '/v3/roles/%s' % role['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -93,8 +98,9 @@ class _DomainAndProjectUserRoleTests(object): with self.test_client() as c: c.get( - '/v3/roles', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_a_role(self): @@ -104,8 +110,9 @@ class _DomainAndProjectUserRoleTests(object): with self.test_client() as c: c.get( - '/v3/roles/%s' % role['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_roles(self): @@ -113,8 +120,10 @@ class _DomainAndProjectUserRoleTests(object): with self.test_client() as c: c.post( - '/v3/roles', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_roles(self): @@ -126,8 +135,10 @@ class _DomainAndProjectUserRoleTests(object): with self.test_client() as c: c.patch( - '/v3/roles/%s' % role['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % role['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_roles(self): @@ -137,15 +148,18 @@ class _DomainAndProjectUserRoleTests(object): with self.test_client() as c: c.delete( - '/v3/roles/%s' % role['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/roles/%s' % 
role['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserRoleTests, - _SystemReaderAndMemberRoleTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserRoleTests, + _SystemReaderAndMemberRoleTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -156,16 +170,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -176,10 +189,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserRoleTests, - _SystemReaderAndMemberRoleTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserRoleTests, + _SystemReaderAndMemberRoleTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -190,16 +205,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, 
self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -210,9 +224,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserRoleTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserRoleTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -226,7 +242,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -251,7 +267,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/roles/%s' % role['id'], json=update, headers=self.headers, + '/v3/roles/%s' % role['id'], + json=update, + headers=self.headers, ) def test_user_can_delete_roles(self): @@ -263,9 +281,11 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, c.delete('/v3/roles/%s' % role['id'], headers=self.headers) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserRoleTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserRoleTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -280,14 +300,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - 
self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -298,9 +319,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserRoleTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserRoleTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -312,7 +335,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -324,9 +347,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserRoleTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserRoleTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -348,14 +372,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + 
project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_service_providers.py b/keystone/tests/protection/v3/test_service_providers.py index acebbc4e77..0f1ef89122 100644 --- a/keystone/tests/protection/v3/test_service_providers.py +++ b/keystone/tests/protection/v3/test_service_providers.py @@ -49,9 +49,9 @@ class _SystemUserServiceProviderTests(object): with self.test_client() as c: r = c.get( - '/v3/OS-FEDERATION/service_providers/%s' % - service_provider['id'], - headers=self.headers + '/v3/OS-FEDERATION/service_providers/%s' + % service_provider['id'], + headers=self.headers, ) self.assertEqual( service_provider['id'], r.json['service_provider']['id'] @@ -74,7 +74,7 @@ class _SystemReaderAndMemberUserServiceProviderTests(object): '/v3/OS-FEDERATION/service_providers/%s' % uuid.uuid4().hex, headers=self.headers, json=create, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_service_providers(self): @@ -86,11 +86,11 @@ class _SystemReaderAndMemberUserServiceProviderTests(object): with self.test_client() as c: c.patch( - '/v3/OS-FEDERATION/service_providers/%s' % - service_provider['id'], + '/v3/OS-FEDERATION/service_providers/%s' + % service_provider['id'], headers=self.headers, json=update, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_service_providers(self): @@ -100,10 +100,10 @@ class _SystemReaderAndMemberUserServiceProviderTests(object): with self.test_client() as c: c.delete( - '/v3/OS-FEDERATION/service_providers/%s' % - service_provider['id'], + '/v3/OS-FEDERATION/service_providers/%s' + % service_provider['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -123,7 +123,7 @@ class _DomainAndProjectUserServiceProviderTests(object): 
'/v3/OS-FEDERATION/service_providers/%s' % uuid.uuid4().hex, headers=self.headers, json=create, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_service_providers(self): @@ -135,11 +135,11 @@ class _DomainAndProjectUserServiceProviderTests(object): with self.test_client() as c: c.patch( - '/v3/OS-FEDERATION/service_providers/%s' % - service_provider['id'], + '/v3/OS-FEDERATION/service_providers/%s' + % service_provider['id'], headers=self.headers, json=update, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_service_providers(self): @@ -149,8 +149,9 @@ class _DomainAndProjectUserServiceProviderTests(object): with self.test_client() as c: c.get( - '/v3/OS-FEDERATION/service_providers', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/OS-FEDERATION/service_providers', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_a_service_provider(self): @@ -160,10 +161,10 @@ class _DomainAndProjectUserServiceProviderTests(object): with self.test_client() as c: c.get( - '/v3/OS-FEDERATION/service_providers/%s' % - service_provider['id'], + '/v3/OS-FEDERATION/service_providers/%s' + % service_provider['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_service_providers(self): @@ -173,17 +174,19 @@ class _DomainAndProjectUserServiceProviderTests(object): with self.test_client() as c: c.delete( - '/v3/OS-FEDERATION/service_providers/%s' % - service_provider['id'], + '/v3/OS-FEDERATION/service_providers/%s' + % service_provider['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserServiceProviderTests, - 
_SystemReaderAndMemberUserServiceProviderTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserServiceProviderTests, + _SystemReaderAndMemberUserServiceProviderTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -194,16 +197,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -214,10 +216,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserServiceProviderTests, - _SystemReaderAndMemberUserServiceProviderTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserServiceProviderTests, + _SystemReaderAndMemberUserServiceProviderTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -228,16 +232,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, 
password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -248,9 +251,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserServiceProviderTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserServiceProviderTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -264,7 +269,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -287,7 +292,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, '/v3/OS-FEDERATION/service_providers/%s' % uuid.uuid4().hex, headers=self.headers, json=create, - expected_status_code=http.client.CREATED + expected_status_code=http.client.CREATED, ) def test_user_can_update_service_providers(self): @@ -299,10 +304,10 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/OS-FEDERATION/service_providers/%s' % - service_provider['id'], + '/v3/OS-FEDERATION/service_providers/%s' + % service_provider['id'], headers=self.headers, - json=update + json=update, ) def test_user_can_delete_service_providers(self): @@ -312,15 +317,17 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - '/v3/OS-FEDERATION/service_providers/%s' % - service_provider['id'], - headers=self.headers + '/v3/OS-FEDERATION/service_providers/%s' + % service_provider['id'], + headers=self.headers, ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - 
_DomainAndProjectUserServiceProviderTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserServiceProviderTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -335,14 +342,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -353,9 +361,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserServiceProviderTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserServiceProviderTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -367,7 +377,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -379,9 +389,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserServiceProviderTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + 
_DomainAndProjectUserServiceProviderTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -403,14 +414,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_services.py b/keystone/tests/protection/v3/test_services.py index 2fa6f2b80d..87cd52a57e 100644 --- a/keystone/tests/protection/v3/test_services.py +++ b/keystone/tests/protection/v3/test_services.py @@ -67,8 +67,10 @@ class _SystemReaderAndMemberUserServiceTests(object): with self.test_client() as c: c.post( - '/v3/services', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/services', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_services(self): @@ -79,9 +81,10 @@ class _SystemReaderAndMemberUserServiceTests(object): with self.test_client() as c: c.patch( - '/v3/services/%s' % service['id'], json=update, + '/v3/services/%s' % service['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_services(self): @@ -90,8 +93,9 @@ class _SystemReaderAndMemberUserServiceTests(object): with self.test_client() as c: c.delete( - '/v3/services/%s' % service['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/services/%s' % service['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -107,8 +111,10 @@ class 
_DomainAndProjectUserServiceTests(object): with self.test_client() as c: c.post( - '/v3/services', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/services', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_services(self): @@ -117,8 +123,9 @@ class _DomainAndProjectUserServiceTests(object): with self.test_client() as c: c.get( - '/v3/services', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/services', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_a_service(self): @@ -127,8 +134,9 @@ class _DomainAndProjectUserServiceTests(object): with self.test_client() as c: c.get( - '/v3/services/%s' % service['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/services/%s' % service['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_services(self): @@ -139,9 +147,10 @@ class _DomainAndProjectUserServiceTests(object): with self.test_client() as c: c.patch( - '/v3/services/%s' % service['id'], json=update, + '/v3/services/%s' % service['id'], + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_services(self): @@ -150,15 +159,18 @@ class _DomainAndProjectUserServiceTests(object): with self.test_client() as c: c.delete( - '/v3/services/%s' % service['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/services/%s' % service['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserServiceTests, - _SystemReaderAndMemberUserServiceTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserServiceTests, + 
_SystemReaderAndMemberUserServiceTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -169,16 +181,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -189,10 +200,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserServiceTests, - _SystemReaderAndMemberUserServiceTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserServiceTests, + _SystemReaderAndMemberUserServiceTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -203,16 +216,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -223,9 +235,11 @@ class 
SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserServiceTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserServiceTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -239,7 +253,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -268,8 +282,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.patch( - '/v3/services/%s' % service['id'], json=update, - headers=self.headers + '/v3/services/%s' % service['id'], + json=update, + headers=self.headers, ) def test_user_can_delete_services(self): @@ -280,9 +295,11 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, c.delete('/v3/services/%s' % service['id'], headers=self.headers) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserServiceTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserServiceTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -297,14 +314,15 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + 
domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -315,9 +333,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserServiceTests): +class ProjectUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserServiceTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -329,7 +349,7 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -341,9 +361,10 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, class ProjectUserTestsWithoutEnforceScope( - base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserServiceTests): + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserServiceTests, +): def setUp(self): super(ProjectUserTestsWithoutEnforceScope, self).setUp() @@ -365,14 +386,15 @@ class ProjectUserTestsWithoutEnforceScope( )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_system_assignments.py b/keystone/tests/protection/v3/test_system_assignments.py index a17d3e3247..3203127bb7 100644 --- 
a/keystone/tests/protection/v3/test_system_assignments.py +++ b/keystone/tests/protection/v3/test_system_assignments.py @@ -59,10 +59,10 @@ class _SystemUserSystemAssignmentTests(object): with self.test_client() as c: c.get( - '/v3/system/users/%s/roles/%s' % ( - user['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + '/v3/system/users/%s/roles/%s' + % (user['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, ) def test_user_can_list_group_system_role_assignments(self): @@ -77,7 +77,7 @@ class _SystemUserSystemAssignmentTests(object): with self.test_client() as c: r = c.get( '/v3/system/groups/%s/roles' % group['id'], - headers=self.headers + headers=self.headers, ) self.assertEqual(1, len(r.json['roles'])) self.assertEqual( @@ -95,10 +95,10 @@ class _SystemUserSystemAssignmentTests(object): with self.test_client() as c: c.get( - '/v3/system/groups/%s/roles/%s' % ( - group['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.NO_CONTENT + '/v3/system/groups/%s/roles/%s' + % (group['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.NO_CONTENT, ) @@ -111,10 +111,10 @@ class _SystemMemberAndReaderSystemAssignmentTests(object): with self.test_client() as c: c.put( - '/v3/system/users/%s/roles/%s' % ( - user['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/users/%s/roles/%s' + % (user['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_system_assignments(self): @@ -128,10 +128,10 @@ class _SystemMemberAndReaderSystemAssignmentTests(object): with self.test_client() as c: c.delete( - '/v3/system/users/%s/roles/%s' % ( - user['id'], self.bootstrapper.member_role_id - ), 
headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/users/%s/roles/%s' + % (user['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_grant_group_system_assignment(self): @@ -141,10 +141,10 @@ class _SystemMemberAndReaderSystemAssignmentTests(object): with self.test_client() as c: c.put( - '/v3/system/groups/%s/roles/%s' % ( - group['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/groups/%s/roles/%s' + % (group['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_group_system_assignment(self): @@ -158,10 +158,10 @@ class _SystemMemberAndReaderSystemAssignmentTests(object): with self.test_client() as c: c.delete( - '/v3/system/groups/%s/roles/%s' % ( - group['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/groups/%s/roles/%s' + % (group['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -178,8 +178,9 @@ class _DomainAndProjectUserSystemAssignmentTests(object): with self.test_client() as c: c.get( - '/v3/system/users/%s/roles' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/users/%s/roles' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_check_user_system_role_assignments(self): @@ -193,10 +194,10 @@ class _DomainAndProjectUserSystemAssignmentTests(object): with self.test_client() as c: c.get( - '/v3/system/users/%s/roles/%s' % ( - user['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/users/%s/roles/%s' + % (user['id'], self.bootstrapper.member_role_id), + 
headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_grant_system_assignments(self): @@ -206,10 +207,10 @@ class _DomainAndProjectUserSystemAssignmentTests(object): with self.test_client() as c: c.put( - '/v3/system/users/%s/roles/%s' % ( - user['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/users/%s/roles/%s' + % (user['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_system_assignments(self): @@ -223,10 +224,10 @@ class _DomainAndProjectUserSystemAssignmentTests(object): with self.test_client() as c: c.delete( - '/v3/system/users/%s/roles/%s' % ( - user['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/users/%s/roles/%s' + % (user['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_group_system_role_assignments(self): @@ -242,7 +243,7 @@ class _DomainAndProjectUserSystemAssignmentTests(object): c.get( '/v3/system/groups/%s/roles' % group['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_check_group_system_role_assignments(self): @@ -256,10 +257,10 @@ class _DomainAndProjectUserSystemAssignmentTests(object): with self.test_client() as c: c.get( - '/v3/system/groups/%s/roles/%s' % ( - group['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/groups/%s/roles/%s' + % (group['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_grant_group_system_assignments(self): @@ -269,10 +270,10 @@ class _DomainAndProjectUserSystemAssignmentTests(object): with 
self.test_client() as c: c.put( - '/v3/system/groups/%s/roles/%s' % ( - group['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/groups/%s/roles/%s' + % (group['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_group_system_assignments(self): @@ -286,17 +287,19 @@ class _DomainAndProjectUserSystemAssignmentTests(object): with self.test_client() as c: c.delete( - '/v3/system/groups/%s/roles/%s' % ( - group['id'], self.bootstrapper.member_role_id - ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/system/groups/%s/roles/%s' + % (group['id'], self.bootstrapper.member_role_id), + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserSystemAssignmentTests, - _SystemMemberAndReaderSystemAssignmentTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserSystemAssignmentTests, + _SystemMemberAndReaderSystemAssignmentTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -307,16 +310,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -327,10 +329,12 @@ class 
SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserSystemAssignmentTests, - _SystemMemberAndReaderSystemAssignmentTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserSystemAssignmentTests, + _SystemMemberAndReaderSystemAssignmentTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -341,9 +345,7 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) @@ -352,13 +354,14 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, { 'user_id': self.user_id, 'system': 'all', - 'role_id': self.bootstrapper.member_role_id + 'role_id': self.bootstrapper.member_role_id, } ] auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -369,9 +372,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserSystemAssignmentTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserSystemAssignmentTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -383,8 +388,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, self.expected = [] auth = self.build_authentication_request( - 
user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + user_id=self.user_id, + password=self.bootstrapper.admin_password, + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -401,9 +407,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/system/users/%s/roles/%s' % ( - user['id'], self.bootstrapper.member_role_id - ), headers=self.headers, + '/v3/system/users/%s/roles/%s' + % (user['id'], self.bootstrapper.member_role_id), + headers=self.headers, ) def test_user_can_revoke_system_assignments(self): @@ -417,9 +423,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - '/v3/system/users/%s/roles/%s' % ( - user['id'], self.bootstrapper.member_role_id - ), headers=self.headers + '/v3/system/users/%s/roles/%s' + % (user['id'], self.bootstrapper.member_role_id), + headers=self.headers, ) def test_user_can_grant_group_system_assignments(self): @@ -429,9 +435,9 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.put( - '/v3/system/groups/%s/roles/%s' % ( - group['id'], self.bootstrapper.member_role_id - ), headers=self.headers, + '/v3/system/groups/%s/roles/%s' + % (group['id'], self.bootstrapper.member_role_id), + headers=self.headers, ) def test_user_can_revoke_group_system_assignments(self): @@ -445,15 +451,17 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - '/v3/system/groups/%s/roles/%s' % ( - group['id'], self.bootstrapper.member_role_id - ), headers=self.headers + '/v3/system/groups/%s/roles/%s' + % (group['id'], self.bootstrapper.member_role_id), + headers=self.headers, ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserSystemAssignmentTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + 
_DomainAndProjectUserSystemAssignmentTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -466,17 +474,19 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, ) self.domain_id = domain['id'] domain_user = unit.new_user_ref(domain_id=self.domain_id) - self.domain_user_id = PROVIDERS.identity_api.create_user( - domain_user - )['id'] + self.domain_user_id = PROVIDERS.identity_api.create_user(domain_user)[ + 'id' + ] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.domain_user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.domain_user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.domain_user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.domain_user_id, + password=domain_user['password'], + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -487,9 +497,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserSystemAssignmentTests): +class ProjectReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserSystemAssignmentTests, +): def setUp(self): super(ProjectReaderTests, self).setUp() @@ -503,23 +515,24 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, self.domain_id = domain['id'] project_reader = unit.new_user_ref(domain_id=self.domain_id) - project_reader_id = PROVIDERS.identity_api.create_user( - project_reader - )['id'] + project_reader_id = PROVIDERS.identity_api.create_user(project_reader)[ + 'id' + ] project = unit.new_project_ref(domain_id=self.domain_id) project_id = PROVIDERS.resource_api.create_project( project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, 
user_id=project_reader_id, - project_id=project_id + self.bootstrapper.reader_role_id, + user_id=project_reader_id, + project_id=project_id, ) auth = self.build_authentication_request( user_id=project_reader_id, password=project_reader['password'], - project_id=project_id + project_id=project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -530,9 +543,11 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserSystemAssignmentTests): +class ProjectMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserSystemAssignmentTests, +): def setUp(self): super(ProjectMemberTests, self).setUp() @@ -546,23 +561,24 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.domain_id = domain['id'] project_member = unit.new_user_ref(domain_id=self.domain_id) - project_member_id = PROVIDERS.identity_api.create_user( - project_member - )['id'] + project_member_id = PROVIDERS.identity_api.create_user(project_member)[ + 'id' + ] project = unit.new_project_ref(domain_id=self.domain_id) project_id = PROVIDERS.resource_api.create_project( project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=project_member_id, - project_id=project_id + self.bootstrapper.member_role_id, + user_id=project_member_id, + project_id=project_id, ) auth = self.build_authentication_request( user_id=project_member_id, password=project_member['password'], - project_id=project_id + project_id=project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -573,9 +589,11 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - 
_DomainAndProjectUserSystemAssignmentTests): +class ProjectAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserSystemAssignmentTests, +): def setUp(self): super(ProjectAdminTests, self).setUp() @@ -596,23 +614,24 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, self.domain_id = domain['id'] project_admin = unit.new_user_ref(domain_id=self.domain_id) - project_admin_id = PROVIDERS.identity_api.create_user( - project_admin - )['id'] + project_admin_id = PROVIDERS.identity_api.create_user(project_admin)[ + 'id' + ] project = unit.new_project_ref(domain_id=self.domain_id) project_id = PROVIDERS.resource_api.create_project( project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=project_admin_id, - project_id=project_id + self.bootstrapper.admin_role_id, + user_id=project_admin_id, + project_id=project_id, ) auth = self.build_authentication_request( user_id=project_admin_id, password=project_admin['password'], - project_id=project_id + project_id=project_id, ) # Grab a token using the persona we're testing and prepare headers @@ -636,6 +655,6 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, 'identity:check_system_grant_for_user': base.SYSTEM_READER, 'identity:list_system_grants_for_user': base.SYSTEM_READER, 'identity:create_system_grant_for_user': base.SYSTEM_ADMIN, - 'identity:revoke_system_grant_for_user': base.SYSTEM_ADMIN + 'identity:revoke_system_grant_for_user': base.SYSTEM_ADMIN, } f.write(jsonutils.dumps(overridden_policies)) diff --git a/keystone/tests/protection/v3/test_tokens.py b/keystone/tests/protection/v3/test_tokens.py index 3e254f7cd9..5cc55768d3 100644 --- a/keystone/tests/protection/v3/test_tokens.py +++ b/keystone/tests/protection/v3/test_tokens.py @@ -36,8 +36,7 @@ class _SystemUserTokenTests(object): ) system_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - system=True + 
user_id=user['id'], password=user['password'], system=True ) with self.test_client() as c: @@ -57,13 +56,15 @@ class _SystemUserTokenTests(object): user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) domain_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - domain_id=domain['id'] + user_id=user['id'], + password=user['password'], + domain_id=domain['id'], ) with self.test_client() as c: @@ -77,20 +78,22 @@ class _SystemUserTokenTests(object): def test_user_can_validate_project_scoped_token(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) project_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - project_id=project['id'] + user_id=user['id'], + password=user['password'], + project_id=project['id'], ) with self.test_client() as c: @@ -113,8 +116,7 @@ class _SystemMemberAndReaderTokenTests(object): ) system_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - system=True + user_id=user['id'], password=user['password'], system=True ) with self.test_client() as c: @@ -124,8 +126,9 @@ class _SystemMemberAndReaderTokenTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = system_token c.delete( - '/v3/auth/tokens', 
headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_a_domain_scoped_token(self): @@ -137,13 +140,15 @@ class _SystemMemberAndReaderTokenTests(object): user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) domain_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - domain_id=domain['id'] + user_id=user['id'], + password=user['password'], + domain_id=domain['id'], ) with self.test_client() as c: @@ -153,27 +158,30 @@ class _SystemMemberAndReaderTokenTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = domain_token c.delete( - '/v3/auth/tokens', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_a_project_scoped_token(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) project_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - project_id=project['id'] + user_id=user['id'], + password=user['password'], + project_id=project['id'], ) with self.test_client() as c: @@ -183,15 +191,18 @@ class 
_SystemMemberAndReaderTokenTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = project_token c.delete( - '/v3/auth/tokens', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTokenTests, - _SystemMemberAndReaderTokenTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTokenTests, + _SystemMemberAndReaderTokenTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -202,16 +213,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -222,10 +232,12 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTokenTests, - _SystemMemberAndReaderTokenTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTokenTests, + _SystemMemberAndReaderTokenTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -236,16 +248,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( 
domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -256,9 +267,11 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _SystemUserTokenTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _SystemUserTokenTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -270,7 +283,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -289,8 +302,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, ) system_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - system=True + user_id=user['id'], password=user['password'], system=True ) with self.test_client() as c: @@ -310,13 +322,15 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) domain_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - 
domain_id=domain['id'] + user_id=user['id'], + password=user['password'], + domain_id=domain['id'], ) with self.test_client() as c: @@ -330,20 +344,22 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_can_revoke_a_project_scoped_token(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) project_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - project_id=project['id'] + user_id=user['id'], + password=user['password'], + project_id=project['id'], ) with self.test_client() as c: @@ -376,8 +392,7 @@ class _DomainAndProjectUserTests(object): ) system_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - system=True + user_id=user['id'], password=user['password'], system=True ) with self.test_client() as c: @@ -387,8 +402,9 @@ class _DomainAndProjectUserTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = system_token c.get( - '/v3/auth/tokens', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_system_scoped_token(self): @@ -400,8 +416,7 @@ class _DomainAndProjectUserTests(object): ) system_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - system=True + user_id=user['id'], password=user['password'], system=True ) with self.test_client() as c: @@ -411,8 +426,9 @@ class 
_DomainAndProjectUserTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = system_token c.delete( - '/v3/auth/tokens', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_validate_domain_scoped_token(self): @@ -424,13 +440,15 @@ class _DomainAndProjectUserTests(object): user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) domain_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - domain_id=domain['id'] + user_id=user['id'], + password=user['password'], + domain_id=domain['id'], ) with self.test_client() as c: @@ -440,8 +458,9 @@ class _DomainAndProjectUserTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = domain_token c.get( - '/v3/auth/tokens', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_domain_scoped_token(self): @@ -453,13 +472,15 @@ class _DomainAndProjectUserTests(object): user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - domain_id=domain['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + domain_id=domain['id'], ) domain_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - domain_id=domain['id'] + user_id=user['id'], + password=user['password'], + domain_id=domain['id'], ) with self.test_client() as c: @@ -469,27 +490,30 @@ class _DomainAndProjectUserTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = 
domain_token c.delete( - '/v3/auth/tokens', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_validate_project_scoped_token(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) project_auth = self.build_authentication_request( - user_id=user['id'], password=user['password'], - project_id=project['id'] + user_id=user['id'], + password=user['password'], + project_id=project['id'], ) with self.test_client() as c: @@ -499,27 +523,30 @@ class _DomainAndProjectUserTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = project_token c.get( - '/v3/auth/tokens', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_revoke_project_scoped_token(self): project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, - unit.new_project_ref(domain_id=CONF.identity.default_domain_id) + unit.new_project_ref(domain_id=CONF.identity.default_domain_id), ) user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user['id'] = PROVIDERS.identity_api.create_user(user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=user['id'], - project_id=project['id'] + self.bootstrapper.reader_role_id, + user_id=user['id'], + project_id=project['id'], ) project_auth = 
self.build_authentication_request( - user_id=user['id'], password=user['password'], - project_id=project['id'] + user_id=user['id'], + password=user['password'], + project_id=project['id'], ) with self.test_client() as c: @@ -529,14 +556,17 @@ class _DomainAndProjectUserTests(object): with self.test_client() as c: self.headers['X-Subject-Token'] = project_token c.delete( - '/v3/auth/tokens', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/auth/tokens', + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class DomainUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserTests): +class DomainUserTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserTests, +): def setUp(self): super(DomainUserTests, self).setUp() @@ -549,17 +579,19 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, ) self.domain_id = domain['id'] domain_user = unit.new_user_ref(domain_id=self.domain_id) - self.domain_user_id = PROVIDERS.identity_api.create_user( - domain_user - )['id'] + self.domain_user_id = PROVIDERS.identity_api.create_user(domain_user)[ + 'id' + ] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.domain_user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.domain_user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.domain_user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.domain_user_id, + password=domain_user['password'], + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -570,9 +602,11 @@ class DomainUserTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectUserTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _DomainAndProjectUserTests): +class ProjectUserTests( + 
base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _DomainAndProjectUserTests, +): def setUp(self): super(ProjectUserTests, self).setUp() @@ -586,23 +620,24 @@ class ProjectUserTests(base_classes.TestCaseWithBootstrap, self.domain_id = domain['id'] project_reader = unit.new_user_ref(domain_id=self.domain_id) - project_reader_id = PROVIDERS.identity_api.create_user( - project_reader - )['id'] + project_reader_id = PROVIDERS.identity_api.create_user(project_reader)[ + 'id' + ] project = unit.new_project_ref(domain_id=self.domain_id) project_id = PROVIDERS.resource_api.create_project( project['id'], project )['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=project_reader_id, - project_id=project_id + self.bootstrapper.reader_role_id, + user_id=project_reader_id, + project_id=project_id, ) auth = self.build_authentication_request( user_id=project_reader_id, password=project_reader['password'], - project_id=project_id + project_id=project_id, ) # Grab a token using the persona we're testing and prepare headers diff --git a/keystone/tests/protection/v3/test_trusts.py b/keystone/tests/protection/v3/test_trusts.py index ded2c2cc45..3bd7dc8f88 100644 --- a/keystone/tests/protection/v3/test_trusts.py +++ b/keystone/tests/protection/v3/test_trusts.py @@ -27,8 +27,9 @@ CONF = keystone.conf.CONF PROVIDERS = provider_api.ProviderAPIs -class TrustTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin): +class TrustTests( + base_classes.TestCaseWithBootstrap, common_auth.AuthTestMixin +): """Common functionality for all trust tests. Sets up trustor and trustee users and trust. 
@@ -52,34 +53,40 @@ class TrustTests(base_classes.TestCaseWithBootstrap, trustor_user = unit.new_user_ref(domain_id=self.domain_id) self.trustor_user_id = PROVIDERS.identity_api.create_user( - trustor_user)['id'] + trustor_user + )['id'] trustee_user = unit.new_user_ref(domain_id=self.domain_id) self.trustee_user_id = PROVIDERS.identity_api.create_user( - trustee_user)['id'] + trustee_user + )['id'] project = PROVIDERS.resource_api.create_project( uuid.uuid4().hex, unit.new_project_ref(domain_id=self.domain_id) ) self.project_id = project['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.trustor_user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.trustor_user_id, + project_id=self.project_id, ) PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.trustee_user_id, - project_id=project['id'] + self.bootstrapper.member_role_id, + user_id=self.trustee_user_id, + project_id=project['id'], ) self.trust_id = uuid.uuid4().hex self.trust_data = { - 'trust': {'trustor_user_id': self.trustor_user_id, - 'trustee_user_id': self.trustee_user_id, - 'project_id': self.project_id, - 'impersonation': False}, - 'roles': [{"id": self.bootstrapper.member_role_id}] + 'trust': { + 'trustor_user_id': self.trustor_user_id, + 'trustee_user_id': self.trustee_user_id, + 'project_id': self.project_id, + 'impersonation': False, + }, + 'roles': [{"id": self.bootstrapper.member_role_id}], } auth = self.build_authentication_request( user_id=self.trustor_user_id, password=trustor_user['password'], - project_id=project['id'] + project_id=project['id'], ) # Grab a token using the trustor persona we're testing and prepare # headers for requests we'll be making in the tests. 
@@ -91,7 +98,7 @@ class TrustTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.trustee_user_id, password=trustee_user['password'], - project_id=project['id'] + project_id=project['id'], ) # Grab a token using the trustee persona we're testing and prepare # headers for requests we'll be making in the tests. @@ -133,18 +140,14 @@ class _AdminTestsMixin(object): '/v3/OS-TRUST/trusts', json=json, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_admin_list_all_trusts(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: - r = c.get( - '/v3/OS-TRUST/trusts', - headers=self.headers - ) + r = c.get('/v3/OS-TRUST/trusts', headers=self.headers) self.assertEqual(1, len(r.json['trusts'])) @@ -162,13 +165,14 @@ class AdminTokenTests(TrustTests, _AdminTestsMixin): def test_admin_can_delete_trust_for_other_user(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.headers, - expected_status_code=http.client.NO_CONTENT + expected_status_code=http.client.NO_CONTENT, ) def test_admin_can_get_non_existent_trust_not_found(self): @@ -177,41 +181,40 @@ class AdminTokenTests(TrustTests, _AdminTestsMixin): c.get( '/v3/OS-TRUST/trusts/%s' % trust_id, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) def test_admin_cannot_get_trust_for_other_user(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s' % self.trust_id, headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_admin_cannot_list_trust_roles_for_other_user(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_admin_cannot_get_trust_role_for_other_user(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -224,63 +227,64 @@ class _SystemUserTests(object): c.get( '/v3/OS-TRUST/trusts/%s' % trust_id, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) def test_user_can_get_trust_for_other_user(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: r = c.get( - '/v3/OS-TRUST/trusts/%s' % self.trust_id, - headers=self.headers + '/v3/OS-TRUST/trusts/%s' % self.trust_id, headers=self.headers ) self.assertEqual(r.json['trust']['id'], self.trust_id) def test_user_can_list_trusts_for_trustee(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustee_user_id=%s' % - self.trustee_user_id), - headers=self.headers + ( + 
'/v3/OS-TRUST/trusts?trustee_user_id=%s' + % self.trustee_user_id + ), + headers=self.headers, ) def test_user_can_list_trusts_for_trustor(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustor_user_id=%s' % - self.trustor_user_id), - headers=self.headers + ( + '/v3/OS-TRUST/trusts?trustor_user_id=%s' + % self.trustor_user_id + ), + headers=self.headers, ) def test_user_can_list_trust_roles_for_other_user(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: r = c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, - headers=self.headers + headers=self.headers, ) - self.assertEqual(r.json['roles'][0]['id'], - self.bootstrapper.member_role_id) + self.assertEqual( + r.json['roles'][0]['id'], self.bootstrapper.member_role_id + ) def test_user_can_get_trust_role_for_other_user(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), - headers=self.headers + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), + headers=self.headers, ) @@ -296,18 +300,19 @@ class _SystemReaderMemberTests(_SystemUserTests): '/v3/OS-TRUST/trusts', json=json, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_trust(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -321,9 +326,7 @@ class SystemReaderTests(TrustTests, _SystemReaderMemberTests): system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) @@ -331,7 +334,7 @@ class SystemReaderTests(TrustTests, _SystemReaderMemberTests): auth = self.build_authentication_request( user_id=self.user_id, password=system_reader['password'], - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -352,9 +355,7 @@ class SystemMemberTests(TrustTests, _SystemReaderMemberTests): system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) @@ -362,7 +363,7 @@ class SystemMemberTests(TrustTests, _SystemReaderMemberTests): auth = self.build_authentication_request( user_id=self.user_id, password=system_member['password'], - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -384,7 +385,7 @@ class SystemAdminTests(TrustTests, _AdminTestsMixin, _SystemUserTests): auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -396,74 +397,70 @@ class SystemAdminTests(TrustTests, _AdminTestsMixin, _SystemUserTests): def test_admin_can_delete_trust_for_other_user(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, 
**self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( - '/v3/OS-TRUST/trusts/%s' % ref['id'], - headers=self.headers + '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.headers ) def test_admin_cannot_delete_trust_for_user_overridden_defaults(self): # only the is_admin admin can do this self._override_policy_old_defaults() ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_admin_cannot_get_trust_for_other_user_overridden_defaults(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s' % self.trust_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_admin_cannot_list_roles_for_other_user_overridden_defaults(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_admin_cannot_get_trust_role_for_other_user_overridden(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % 
(self.trust_id, self.bootstrapper.member_role_id) + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_list_all_trusts_overridden_defaults(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: - r = c.get( - '/v3/OS-TRUST/trusts', - headers=self.headers - ) + r = c.get('/v3/OS-TRUST/trusts', headers=self.headers) self.assertEqual(1, len(r.json['trusts'])) @@ -473,17 +470,19 @@ class ProjectUserTests(TrustTests): def setUp(self): super(ProjectUserTests, self).setUp() other_user = unit.new_user_ref(domain_id=self.domain_id) - self.other_user_id = PROVIDERS.identity_api.create_user( - other_user)['id'] + self.other_user_id = PROVIDERS.identity_api.create_user(other_user)[ + 'id' + ] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.other_user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.other_user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( user_id=self.other_user_id, password=other_user['password'], - project_id=self.project_id + project_id=self.project_id, ) # Grab a token using another persona who has no trusts associated with # them @@ -493,99 +492,105 @@ class ProjectUserTests(TrustTests): self.other_headers = {'X-Auth-Token': self.token_id} def test_user_can_list_trusts_of_whom_they_are_the_trustor(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: r = c.get( - ('/v3/OS-TRUST/trusts?trustor_user_id=%s' % - self.trustor_user_id), - headers=self.trustor_headers + ( + '/v3/OS-TRUST/trusts?trustor_user_id=%s' + % self.trustor_user_id + ), + headers=self.trustor_headers, ) self.assertEqual(1, 
len(r.json['trusts'])) self.assertEqual(self.trust_id, r.json['trusts'][0]['id']) def test_user_can_list_trusts_delegated_to_them(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: r = c.get( - ('/v3/OS-TRUST/trusts?trustee_user_id=%s' % - self.trustee_user_id), - headers=self.trustee_headers + ( + '/v3/OS-TRUST/trusts?trustee_user_id=%s' + % self.trustee_user_id + ), + headers=self.trustee_headers, ) self.assertEqual(1, len(r.json['trusts'])) self.assertEqual(self.trust_id, r.json['trusts'][0]['id']) def test_trustor_cannot_list_trusts_for_trustee(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustee_user_id=%s' % - self.trustee_user_id), + ( + '/v3/OS-TRUST/trusts?trustee_user_id=%s' + % self.trustee_user_id + ), headers=self.trustor_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustee_cannot_list_trusts_for_trustor(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustor_user_id=%s' % - self.trustor_user_id), + ( + '/v3/OS-TRUST/trusts?trustor_user_id=%s' + % self.trustor_user_id + ), headers=self.trustee_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_trusts_for_other_trustor(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustor_user_id=%s' % - self.trustor_user_id), + ( + '/v3/OS-TRUST/trusts?trustor_user_id=%s' + % 
self.trustor_user_id + ), headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_trusts_for_other_trustee(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustee_user_id=%s' % - self.trustee_user_id), + ( + '/v3/OS-TRUST/trusts?trustee_user_id=%s' + % self.trustee_user_id + ), headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_all_trusts(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts', headers=self.trustee_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_another_users_trust(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_get_non_existent_trust_not_found(self): @@ -594,27 +599,29 @@ class ProjectUserTests(TrustTests): c.get( '/v3/OS-TRUST/trusts/%s' % trust_id, headers=self.other_headers, - expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) def test_user_can_get_trust_of_whom_they_are_the_trustor(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s' % ref['id'], - headers=self.trustor_headers + headers=self.trustor_headers, ) def 
test_user_can_get_trust_delegated_to_them(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: r = c.get( '/v3/OS-TRUST/trusts/%s' % ref['id'], - headers=self.trustee_headers + headers=self.trustee_headers, ) self.assertEqual(r.json['trust']['id'], self.trust_id) @@ -624,9 +631,7 @@ class ProjectUserTests(TrustTests): with self.test_client() as c: c.post( - '/v3/OS-TRUST/trusts', - json=json, - headers=self.trustor_headers + '/v3/OS-TRUST/trusts', json=json, headers=self.trustor_headers ) def test_trustee_cannot_create_trust(self): @@ -638,305 +643,320 @@ class ProjectUserTests(TrustTests): '/v3/OS-TRUST/trusts', json=json, headers=self.trustee_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustor_can_delete_trust(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], - headers=self.trustor_headers + headers=self.trustor_headers, ) def test_trustee_cannot_delete_trust(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.trustee_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_trust_for_other_user(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustor_can_list_trust_roles(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) 
+ PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: r = c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, - headers=self.trustor_headers + headers=self.trustor_headers, ) - self.assertEqual(r.json['roles'][0]['id'], - self.bootstrapper.member_role_id) + self.assertEqual( + r.json['roles'][0]['id'], self.bootstrapper.member_role_id + ) def test_trustee_can_list_trust_roles(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: r = c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, - headers=self.trustee_headers + headers=self.trustee_headers, ) - self.assertEqual(r.json['roles'][0]['id'], - self.bootstrapper.member_role_id) + self.assertEqual( + r.json['roles'][0]['id'], self.bootstrapper.member_role_id + ) def test_user_cannot_list_trust_roles_for_other_user(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustor_can_get_trust_role(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.head( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), - headers=self.trustor_headers + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), + headers=self.trustor_headers, ) def test_trustee_can_get_trust_role(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: 
c.head( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), - headers=self.trustee_headers + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), + headers=self.trustee_headers, ) def test_user_cannot_get_trust_role_for_other_user(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.head( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustor_cannot_list_trusts_for_trustee_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustee_user_id=%s' % - self.trustee_user_id), + ( + '/v3/OS-TRUST/trusts?trustee_user_id=%s' + % self.trustee_user_id + ), headers=self.trustor_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustee_cannot_list_trusts_for_trustor_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustor_user_id=%s' % - self.trustor_user_id), + ( + '/v3/OS-TRUST/trusts?trustor_user_id=%s' + % self.trustor_user_id + ), headers=self.trustee_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def 
test_user_cannot_list_trusts_for_other_trustor_overridden(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustor_user_id=%s' % - self.trustor_user_id), + ( + '/v3/OS-TRUST/trusts?trustor_user_id=%s' + % self.trustor_user_id + ), headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_trusts_for_trustee_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustee_user_id=%s' % - self.trustee_user_id), + ( + '/v3/OS-TRUST/trusts?trustee_user_id=%s' + % self.trustee_user_id + ), headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_all_trusts_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts', headers=self.trustee_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustor_can_delete_trust_overridden_default(self): self._override_policy_old_defaults() ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], - headers=self.trustor_headers + headers=self.trustor_headers, ) def test_trustee_cannot_delete_trust_overridden_default(self): self._override_policy_old_defaults() ref = 
PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.trustee_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_trust_for_other_user_overridden_default(self): self._override_policy_old_defaults() ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_get_trust_of_whom_they_are_the_trustor_overridden(self): self._override_policy_old_defaults() ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s' % ref['id'], - headers=self.trustor_headers + headers=self.trustor_headers, ) def test_user_can_get_trust_delegated_to_them_overridden_default(self): self._override_policy_old_defaults() ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: r = c.get( '/v3/OS-TRUST/trusts/%s' % ref['id'], - headers=self.trustee_headers + headers=self.trustee_headers, ) self.assertEqual(r.json['trust']['id'], self.trust_id) def test_trustor_can_list_trust_roles_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: r = c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, - headers=self.trustor_headers + headers=self.trustor_headers, ) - self.assertEqual(r.json['roles'][0]['id'], - 
self.bootstrapper.member_role_id) + self.assertEqual( + r.json['roles'][0]['id'], self.bootstrapper.member_role_id + ) def test_trustee_can_list_trust_roles_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: r = c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, - headers=self.trustee_headers + headers=self.trustee_headers, ) - self.assertEqual(r.json['roles'][0]['id'], - self.bootstrapper.member_role_id) + self.assertEqual( + r.json['roles'][0]['id'], self.bootstrapper.member_role_id + ) def test_user_cannot_list_trust_roles_other_user_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustor_can_get_trust_role_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.head( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), - headers=self.trustor_headers + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), + headers=self.trustor_headers, ) def test_trustee_can_get_trust_role_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.head( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - 
(self.trust_id, self.bootstrapper.member_role_id)), - headers=self.trustee_headers + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), + headers=self.trustee_headers, ) def test_user_cannot_get_trust_role_other_user_overridden_default(self): self._override_policy_old_defaults() - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.head( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), headers=self.other_headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) @@ -950,17 +970,17 @@ class DomainUserTests(TrustTests): super(DomainUserTests, self).setUp() self.config_fixture.config(group='oslo_policy', enforce_scope=True) domain_admin = unit.new_user_ref(domain_id=self.domain_id) - self.user_id = PROVIDERS.identity_api.create_user( - domain_admin)['id'] + self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( user_id=self.user_id, password=domain_admin['password'], - domain_id=self.domain_id + domain_id=self.domain_id, ) # Grab a token using another persona who has no trusts associated with # them @@ -970,49 +990,51 @@ class DomainUserTests(TrustTests): self.headers = {'X-Auth-Token': self.token_id} def test_trustor_cannot_list_trusts_for_trustee(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - 
('/v3/OS-TRUST/trusts?trustee_user_id=%s' % - self.trustee_user_id), + ( + '/v3/OS-TRUST/trusts?trustee_user_id=%s' + % self.trustee_user_id + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_trustee_cannot_list_trusts_for_trustor(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( - ('/v3/OS-TRUST/trusts?trustor_user_id=%s' % - self.trustor_user_id), + ( + '/v3/OS-TRUST/trusts?trustor_user_id=%s' + % self.trustor_user_id + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_all_trusts(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts', headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_trust(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_get_non_existent_trust_not_found(self): @@ -1021,7 +1043,7 @@ class DomainUserTests(TrustTests): c.get( '/v3/OS-TRUST/trusts/%s' % trust_id, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) def test_user_cannot_create_trust(self): @@ -1035,39 +1057,40 @@ class DomainUserTests(TrustTests): '/v3/OS-TRUST/trusts', json=json, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def 
test_user_cannot_delete_trust(self): ref = PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + self.trust_id, **self.trust_data + ) with self.test_client() as c: c.delete( '/v3/OS-TRUST/trusts/%s' % ref['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_trust_roles(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.get( '/v3/OS-TRUST/trusts/%s/roles' % self.trust_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_trust_role(self): - PROVIDERS.trust_api.create_trust( - self.trust_id, **self.trust_data) + PROVIDERS.trust_api.create_trust(self.trust_id, **self.trust_data) with self.test_client() as c: c.head( - ('/v3/OS-TRUST/trusts/%s/roles/%s' % - (self.trust_id, self.bootstrapper.member_role_id)), + ( + '/v3/OS-TRUST/trusts/%s/roles/%s' + % (self.trust_id, self.bootstrapper.member_role_id) + ), headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) diff --git a/keystone/tests/protection/v3/test_users.py b/keystone/tests/protection/v3/test_users.py index dab6ed7f39..dee35c6e73 100644 --- a/keystone/tests/protection/v3/test_users.py +++ b/keystone/tests/protection/v3/test_users.py @@ -52,8 +52,9 @@ class _SystemUserTests(object): def test_user_cannot_get_non_existent_user_not_found(self): with self.test_client() as c: c.get( - '/v3/users/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + '/v3/users/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, ) def test_user_can_list_users(self): @@ -81,14 +82,16 @@ class _SystemMemberAndReaderUserTests(object): create = { 'user': { 'name': 
uuid.uuid4().hex, - 'domain': CONF.identity.default_domain_id + 'domain': CONF.identity.default_domain_id, } } with self.test_client() as c: c.post( - '/v3/users', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_users(self): @@ -100,8 +103,10 @@ class _SystemMemberAndReaderUserTests(object): update = {'user': {'email': uuid.uuid4().hex}} c.patch( - '/v3/users/%s' % user['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existent_user_forbidden(self): @@ -112,8 +117,10 @@ class _SystemMemberAndReaderUserTests(object): update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % user['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_users(self): @@ -123,15 +130,17 @@ class _SystemMemberAndReaderUserTests(object): with self.test_client() as c: c.delete( - '/v3/users/%s' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_user_forbidden(self): with self.test_client() as c: c.delete( - '/v3/users/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -158,8 +167,9 @@ class _DomainUserTests(object): with self.test_client() as c: c.get( - '/v3/users/%s' % user['id'], headers=self.headers, - 
expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_list_users_within_domain(self): @@ -198,16 +208,15 @@ class _DomainMemberAndReaderUserTests(object): def test_user_cannot_create_users_within_domain(self): create = { - 'user': { - 'domain_id': self.domain_id, - 'name': uuid.uuid4().hex - } + 'user': {'domain_id': self.domain_id, 'name': uuid.uuid4().hex} } with self.test_client() as c: c.post( - '/v3/users', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_users_in_other_domain(self): @@ -216,16 +225,15 @@ class _DomainMemberAndReaderUserTests(object): ) create = { - 'user': { - 'domain_id': domain['id'], - 'name': uuid.uuid4().hex - } + 'user': {'domain_id': domain['id'], 'name': uuid.uuid4().hex} } with self.test_client() as c: c.post( - '/v3/users', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_users_within_domain(self): @@ -236,8 +244,10 @@ class _DomainMemberAndReaderUserTests(object): update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % user['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_users_in_other_domain(self): @@ -251,8 +261,10 @@ class _DomainMemberAndReaderUserTests(object): update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % user['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % 
user['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existent_user_forbidden(self): @@ -263,8 +275,10 @@ class _DomainMemberAndReaderUserTests(object): update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % user['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_users_within_domain(self): @@ -274,8 +288,9 @@ class _DomainMemberAndReaderUserTests(object): with self.test_client() as c: c.delete( - '/v3/users/%s' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_users_in_other_domain(self): @@ -288,15 +303,17 @@ class _DomainMemberAndReaderUserTests(object): with self.test_client() as c: c.delete( - '/v3/users/%s' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_user_forbidden(self): with self.test_client() as c: c.delete( - '/v3/users/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) @@ -310,8 +327,9 @@ class _ProjectUserTests(object): with self.test_client() as c: c.get( - '/v3/users/%s' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_users_in_other_domains(self): @@ -324,15 +342,17 @@ class _ProjectUserTests(object): with 
self.test_client() as c: c.get( - '/v3/users/%s' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_get_non_existent_user_forbidden(self): with self.test_client() as c: c.get( - '/v3/users/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_users_within_domain(self): @@ -340,7 +360,7 @@ class _ProjectUserTests(object): c.get( '/v3/users?domain_id=%s' % self.domain_id, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_list_users_in_other_domains(self): @@ -355,21 +375,20 @@ class _ProjectUserTests(object): c.get( '/v3/users?domain_id=%s' % domain['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_users_within_domain(self): create = { - 'user': { - 'domain_id': self.domain_id, - 'name': uuid.uuid4().hex - } + 'user': {'domain_id': self.domain_id, 'name': uuid.uuid4().hex} } with self.test_client() as c: c.post( - '/v3/users', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_users_in_other_domains(self): @@ -378,16 +397,15 @@ class _ProjectUserTests(object): ) create = { - 'user': { - 'domain_id': domain['id'], - 'name': uuid.uuid4().hex - } + 'user': {'domain_id': domain['id'], 'name': uuid.uuid4().hex} } with self.test_client() as c: c.post( - '/v3/users', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users', + json=create, + headers=self.headers, + 
expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_users_within_domain(self): @@ -398,8 +416,10 @@ class _ProjectUserTests(object): update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % user['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_users_in_other_domain(self): @@ -413,17 +433,20 @@ class _ProjectUserTests(object): update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % user['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existent_user_forbidden(self): update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % uuid.uuid4().hex, json=update, + '/v3/users/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_users_within_domain(self): @@ -433,8 +456,9 @@ class _ProjectUserTests(object): with self.test_client() as c: c.delete( - '/v3/users/%s' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_users_in_other_domains(self): @@ -447,23 +471,27 @@ class _ProjectUserTests(object): with self.test_client() as c: c.delete( - '/v3/users/%s' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def 
test_user_cannot_delete_non_existent_user_forbidden(self): with self.test_client() as c: c.delete( - '/v3/users/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class SystemReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _SystemUserTests, - _SystemMemberAndReaderUserTests): +class SystemReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _SystemUserTests, + _SystemMemberAndReaderUserTests, +): def setUp(self): super(SystemReaderTests, self).setUp() @@ -474,16 +502,15 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, system_reader = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_reader - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_reader)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.reader_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_reader['password'], - system=True + user_id=self.user_id, + password=system_reader['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -494,11 +521,13 @@ class SystemReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _SystemUserTests, - _SystemMemberAndReaderUserTests): +class SystemMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _SystemUserTests, + _SystemMemberAndReaderUserTests, +): def setUp(self): super(SystemMemberTests, self).setUp() @@ -509,16 +538,15 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, 
system_member = unit.new_user_ref( domain_id=CONF.identity.default_domain_id ) - self.user_id = PROVIDERS.identity_api.create_user( - system_member - )['id'] + self.user_id = PROVIDERS.identity_api.create_user(system_member)['id'] PROVIDERS.assignment_api.create_system_grant_for_user( self.user_id, self.bootstrapper.member_role_id ) auth = self.build_authentication_request( - user_id=self.user_id, password=system_member['password'], - system=True + user_id=self.user_id, + password=system_member['password'], + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -529,10 +557,12 @@ class SystemMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class SystemAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _SystemUserTests): +class SystemAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _SystemUserTests, +): def setUp(self): super(SystemAdminTests, self).setUp() @@ -544,7 +574,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - system=True + system=True, ) # Grab a token using the persona we're testing and prepare headers @@ -558,7 +588,7 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, create = { 'user': { 'name': uuid.uuid4().hex, - 'domain': CONF.identity.default_domain_id + 'domain': CONF.identity.default_domain_id, } } @@ -580,9 +610,10 @@ class SystemAdminTests(base_classes.TestCaseWithBootstrap, update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % uuid.uuid4().hex, json=update, + '/v3/users/%s' % uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + expected_status_code=http.client.NOT_FOUND, ) def test_user_can_delete_users(self): @@ -596,16 +627,19 @@ class 
SystemAdminTests(base_classes.TestCaseWithBootstrap, def test_user_cannot_delete_non_existent_user_not_found(self): with self.test_client() as c: c.delete( - '/v3/users/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.NOT_FOUND + '/v3/users/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.NOT_FOUND, ) -class DomainReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _DomainUserTests, - _DomainMemberAndReaderUserTests): +class DomainReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _DomainUserTests, + _DomainMemberAndReaderUserTests, +): def setUp(self): super(DomainReaderTests, self).setUp() @@ -620,12 +654,14 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, domain_reader = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_reader)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_reader['password'], + user_id=self.user_id, + password=domain_reader['password'], domain_id=self.domain_id, ) @@ -637,11 +673,13 @@ class DomainReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _DomainUserTests, - _DomainMemberAndReaderUserTests): +class DomainMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _DomainUserTests, + _DomainMemberAndReaderUserTests, +): def setUp(self): super(DomainMemberTests, self).setUp() @@ -656,13 +694,15 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, domain_user 
= unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_user)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_user['password'], - domain_id=self.domain_id + user_id=self.user_id, + password=domain_user['password'], + domain_id=self.domain_id, ) # Grab a token using the persona we're testing and prepare headers @@ -673,10 +713,12 @@ class DomainMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class DomainAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _DomainUserTests): +class DomainAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _DomainUserTests, +): def setUp(self): super(DomainAdminTests, self).setUp() @@ -700,12 +742,14 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, domain_admin = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.admin_role_id, user_id=self.user_id, - domain_id=self.domain_id + self.bootstrapper.admin_role_id, + user_id=self.user_id, + domain_id=self.domain_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=domain_admin['password'], + user_id=self.user_id, + password=domain_admin['password'], domain_id=self.domain_id, ) @@ -731,16 +775,13 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, 'identity:list_users': up.SYSTEM_READER_OR_DOMAIN_READER, 'identity:create_user': up.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, 'identity:update_user': up.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, - 'identity:delete_user': up.SYSTEM_ADMIN_OR_DOMAIN_ADMIN + 
'identity:delete_user': up.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, } f.write(jsonutils.dumps(overridden_policies)) def test_user_can_create_users_within_domain(self): create = { - 'user': { - 'domain_id': self.domain_id, - 'name': uuid.uuid4().hex - } + 'user': {'domain_id': self.domain_id, 'name': uuid.uuid4().hex} } with self.test_client() as c: @@ -756,16 +797,15 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) create = { - 'user': { - 'domain-id': domain['id'], - 'name': uuid.uuid4().hex - } + 'user': {'domain-id': domain['id'], 'name': uuid.uuid4().hex} } with self.test_client() as c: c.post( - '/v3/users', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_create_users_in_other_domain(self): @@ -774,16 +814,15 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) create = { - 'user': { - 'domain_id': domain['id'], - 'name': uuid.uuid4().hex - } + 'user': {'domain_id': domain['id'], 'name': uuid.uuid4().hex} } with self.test_client() as c: c.post( - '/v3/users', json=create, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users', + json=create, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_update_users_within_domain(self): @@ -828,17 +867,20 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % user['id'], json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + json=update, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_update_non_existent_user_forbidden(self): update = {'user': {'email': uuid.uuid4().hex}} with self.test_client() as c: c.patch( - '/v3/users/%s' % uuid.uuid4().hex, json=update, + '/v3/users/%s' % 
uuid.uuid4().hex, + json=update, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + expected_status_code=http.client.FORBIDDEN, ) def test_user_can_delete_users_within_domain(self): @@ -847,9 +889,7 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, ) with self.test_client() as c: - c.delete( - '/v3/users/%s' % user['id'], headers=self.headers - ) + c.delete('/v3/users/%s' % user['id'], headers=self.headers) def test_user_cannot_delete_users_in_other_domain(self): domain = PROVIDERS.resource_api.create_domain( @@ -861,22 +901,26 @@ class DomainAdminTests(base_classes.TestCaseWithBootstrap, with self.test_client() as c: c.delete( - '/v3/users/%s' % user['id'], headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % user['id'], + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) def test_user_cannot_delete_non_existent_user_forbidden(self): with self.test_client() as c: c.delete( - '/v3/users/%s' % uuid.uuid4().hex, headers=self.headers, - expected_status_code=http.client.FORBIDDEN + '/v3/users/%s' % uuid.uuid4().hex, + headers=self.headers, + expected_status_code=http.client.FORBIDDEN, ) -class ProjectReaderTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _ProjectUserTests): +class ProjectReaderTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _ProjectUserTests, +): def setUp(self): super(ProjectReaderTests, self).setUp() @@ -896,12 +940,14 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, project_reader = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(project_reader)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.reader_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.reader_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( - 
user_id=self.user_id, password=project_reader['password'], + user_id=self.user_id, + password=project_reader['password'], project_id=self.project_id, ) @@ -913,10 +959,12 @@ class ProjectReaderTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectMemberTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _ProjectUserTests): +class ProjectMemberTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _ProjectUserTests, +): def setUp(self): super(ProjectMemberTests, self).setUp() @@ -936,12 +984,14 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, project_member = unit.new_user_ref(domain_id=self.domain_id) self.user_id = PROVIDERS.identity_api.create_user(project_member)['id'] PROVIDERS.assignment_api.create_grant( - self.bootstrapper.member_role_id, user_id=self.user_id, - project_id=self.project_id + self.bootstrapper.member_role_id, + user_id=self.user_id, + project_id=self.project_id, ) auth = self.build_authentication_request( - user_id=self.user_id, password=project_member['password'], + user_id=self.user_id, + password=project_member['password'], project_id=self.project_id, ) @@ -953,10 +1003,12 @@ class ProjectMemberTests(base_classes.TestCaseWithBootstrap, self.headers = {'X-Auth-Token': self.token_id} -class ProjectAdminTests(base_classes.TestCaseWithBootstrap, - common_auth.AuthTestMixin, - _CommonUserTests, - _ProjectUserTests): +class ProjectAdminTests( + base_classes.TestCaseWithBootstrap, + common_auth.AuthTestMixin, + _CommonUserTests, + _ProjectUserTests, +): def setUp(self): super(ProjectAdminTests, self).setUp() @@ -982,7 +1034,7 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, auth = self.build_authentication_request( user_id=self.user_id, password=self.bootstrapper.admin_password, - project_id=self.bootstrapper.project_id + project_id=self.bootstrapper.project_id, ) # Grab a token using the 
persona we're testing and prepare headers @@ -1007,6 +1059,6 @@ class ProjectAdminTests(base_classes.TestCaseWithBootstrap, 'identity:list_users': up.SYSTEM_READER_OR_DOMAIN_READER, 'identity:create_user': up.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, 'identity:update_user': up.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, - 'identity:delete_user': up.SYSTEM_ADMIN_OR_DOMAIN_ADMIN + 'identity:delete_user': up.SYSTEM_ADMIN_OR_DOMAIN_ADMIN, } f.write(jsonutils.dumps(overridden_policies)) diff --git a/keystone/tests/unit/application_credential/backends/test_sql.py b/keystone/tests/unit/application_credential/backends/test_sql.py index 26309fbae6..8245955761 100644 --- a/keystone/tests/unit/application_credential/backends/test_sql.py +++ b/keystone/tests/unit/application_credential/backends/test_sql.py @@ -23,39 +23,48 @@ PROVIDERS = provider_api.ProviderAPIs class SQLModelTestCase(core_sql.BaseBackendSqlModels): def test_application_credential_model(self): - cols = (('internal_id', sql.Integer, None), - ('id', sql.String, 64), - ('name', sql.String, 255), - ('secret_hash', sql.String, 255), - ('description', sql.Text, None), - ('user_id', sql.String, 64), - ('project_id', sql.String, 64), - ('system', sql.String, 64), - ('expires_at', sql.DateTimeInt, None)) + cols = ( + ('internal_id', sql.Integer, None), + ('id', sql.String, 64), + ('name', sql.String, 255), + ('secret_hash', sql.String, 255), + ('description', sql.Text, None), + ('user_id', sql.String, 64), + ('project_id', sql.String, 64), + ('system', sql.String, 64), + ('expires_at', sql.DateTimeInt, None), + ) self.assertExpectedSchema('application_credential', cols) def test_application_credential_role_model(self): - cols = (('application_credential_id', sql.Integer, None), - ('role_id', sql.String, 64)) + cols = ( + ('application_credential_id', sql.Integer, None), + ('role_id', sql.String, 64), + ) self.assertExpectedSchema('application_credential_role', cols) def test_access_rule_model(self): - cols = (('id', sql.Integer, None), - 
('external_id', sql.String, 64), - ('user_id', sql.String, 64), - ('service', sql.String, 64), - ('path', sql.String, 128), - ('method', sql.String, 16)) + cols = ( + ('id', sql.Integer, None), + ('external_id', sql.String, 64), + ('user_id', sql.String, 64), + ('service', sql.String, 64), + ('path', sql.String, 128), + ('method', sql.String, 16), + ) self.assertExpectedSchema('access_rule', cols) def test_application_credential_access_rule_model(self): - cols = (('application_credential_id', sql.Integer, None), - ('access_rule_id', sql.Integer, None)) + cols = ( + ('application_credential_id', sql.Integer, None), + ('access_rule_id', sql.Integer, None), + ) self.assertExpectedSchema('application_credential_access_rule', cols) -class SQLDriverTestCase(core_sql.BaseBackendSqlTests, - test_backends.ApplicationCredentialTests): +class SQLDriverTestCase( + core_sql.BaseBackendSqlTests, test_backends.ApplicationCredentialTests +): def setUp(self): self.useFixture(database.Database()) self.driver = sql_driver.ApplicationCredential() diff --git a/keystone/tests/unit/application_credential/test_backends.py b/keystone/tests/unit/application_credential/test_backends.py index a7560b34de..78c7e64a6e 100644 --- a/keystone/tests/unit/application_credential/test_backends.py +++ b/keystone/tests/unit/application_credential/test_backends.py @@ -27,8 +27,9 @@ PROVIDERS = provider_api.ProviderAPIs class ApplicationCredentialTests(object): - def _new_app_cred_data(self, user_id, project_id=None, name=None, - expires=None, system=None): + def _new_app_cred_data( + self, user_id, project_id=None, name=None, expires=None, system=None + ): if not name: name = uuid.uuid4().hex if not expires: @@ -49,13 +50,14 @@ class ApplicationCredentialTests(object): {'id': self.role__member_['id']}, ], 'secret': uuid.uuid4().hex, - 'unrestricted': False + 'unrestricted': False, } return app_cred_data def test_create_application_credential(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], 
- project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) resp = self.app_cred_api.create_application_credential(app_cred) resp_roles = resp.pop('roles') orig_roles = app_cred.pop('roles') @@ -65,29 +67,36 @@ class ApplicationCredentialTests(object): def test_create_duplicate_application_credential_fails(self): # Ensure a user can't create two application credentials with the same # name - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) name = app_cred['name'] self.app_cred_api.create_application_credential(app_cred) - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id'], - name=name) - self.assertRaises(exception.Conflict, - self.app_cred_api.create_application_credential, - app_cred) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'], name=name + ) + self.assertRaises( + exception.Conflict, + self.app_cred_api.create_application_credential, + app_cred, + ) def test_create_application_credential_require_role_assignments(self): # Ensure a user can't create an application credential for a project # they don't have a role assignment on - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_baz['id']) - self.assertRaises(exception.RoleAssignmentNotFound, - self.app_cred_api.create_application_credential, - app_cred) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_baz['id'] + ) + self.assertRaises( + exception.RoleAssignmentNotFound, + self.app_cred_api.create_application_credential, + app_cred, + ) def test_application_credential_allow_recursion(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + 
self.user_foo['id'], project_id=self.project_bar['id'] + ) app_cred['unrestricted'] = True resp = self.app_cred_api.create_application_credential(app_cred) resp.pop('roles') @@ -97,25 +106,31 @@ class ApplicationCredentialTests(object): def test_application_credential_limits(self): config_fixture_ = self.user = self.useFixture(config_fixture.Config()) config_fixture_.config(group='application_credential', user_limit=2) - app_cred = self._new_app_cred_data(self.user_foo['id'], - self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], self.project_bar['id'] + ) self.app_cred_api.create_application_credential(app_cred) app_cred['name'] = 'two' self.app_cred_api.create_application_credential(app_cred) app_cred['name'] = 'three' - self.assertRaises(exception.ApplicationCredentialLimitExceeded, - self.app_cred_api.create_application_credential, - app_cred) + self.assertRaises( + exception.ApplicationCredentialLimitExceeded, + self.app_cred_api.create_application_credential, + app_cred, + ) def test_create_application_credential_with_access_rules(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) - app_cred['access_rules'] = [{ - 'id': uuid.uuid4().hex, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) + app_cred['access_rules'] = [ + { + 'id': uuid.uuid4().hex, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ] resp = self.app_cred_api.create_application_credential(app_cred) resp.pop('roles') resp_access_rules = resp.pop('access_rules') @@ -126,26 +141,31 @@ class ApplicationCredentialTests(object): self.assertDictEqual(orig_access_rules[i], ar) def test_create_application_credential_with_preexisting_access_rules(self): - app_cred_1 = self._new_app_cred_data(self.user_foo['id'], - 
project_id=self.project_bar['id']) - app_cred_1['access_rules'] = [{ - 'id': uuid.uuid4().hex, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + app_cred_1 = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) + app_cred_1['access_rules'] = [ + { + 'id': uuid.uuid4().hex, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ] resp = self.app_cred_api.create_application_credential(app_cred_1) resp_access_rules_1 = resp.pop('access_rules') - app_cred_2 = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred_2 = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) app_cred_2['access_rules'] = [{'id': resp_access_rules_1[0]['id']}] resp = self.app_cred_api.create_application_credential(app_cred_2) resp_access_rules_2 = resp.pop('access_rules') self.assertDictEqual(resp_access_rules_1[0], resp_access_rules_2[0]) def test_get_application_credential(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) create_resp = self.app_cred_api.create_application_credential(app_cred) app_cred_id = create_resp['id'] get_resp = self.app_cred_api.get_application_credential(app_cred_id) @@ -153,26 +173,29 @@ class ApplicationCredentialTests(object): self.assertDictEqual(create_resp, get_resp) def test_get_application_credential_not_found(self): - self.assertRaises(exception.ApplicationCredentialNotFound, - self.app_cred_api.get_application_credential, - uuid.uuid4().hex) + self.assertRaises( + exception.ApplicationCredentialNotFound, + self.app_cred_api.get_application_credential, + uuid.uuid4().hex, + ) def test_list_application_credentials(self): - app_cred_1 = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id'], 
- name='app1') - app_cred_2 = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id'], - name='app2') - app_cred_3 = self._new_app_cred_data(self.user_two['id'], - project_id=self.project_baz['id'], - name='app3') + app_cred_1 = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'], name='app1' + ) + app_cred_2 = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'], name='app2' + ) + app_cred_3 = self._new_app_cred_data( + self.user_two['id'], project_id=self.project_baz['id'], name='app3' + ) resp1 = self.app_cred_api.create_application_credential(app_cred_1) resp2 = self.app_cred_api.create_application_credential(app_cred_2) resp3 = self.app_cred_api.create_application_credential(app_cred_3) hints = driver_hints.Hints() resp = self.app_cred_api.list_application_credentials( - self.user_foo['id'], hints) + self.user_foo['id'], hints + ) resp_ids = [ac['id'] for ac in resp] self.assertIn(resp1['id'], resp_ids) self.assertIn(resp2['id'], resp_ids) @@ -182,13 +205,15 @@ class ApplicationCredentialTests(object): def _list_ids(self, user): hints = driver_hints.Hints() - resp = self.app_cred_api.list_application_credentials(user['id'], - hints) + resp = self.app_cred_api.list_application_credentials( + user['id'], hints + ) return [ac['id'] for ac in resp] def test_delete_application_credential(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) self.app_cred_api.create_application_credential(app_cred) # cache the information @@ -199,22 +224,26 @@ class ApplicationCredentialTests(object): self.assertNotIn(app_cred['id'], self._list_ids(self.user_foo)) # the cache information has been invalidated. 
- self.assertRaises(exception.ApplicationCredentialNotFound, - self.app_cred_api.get_application_credential, - app_cred['id']) + self.assertRaises( + exception.ApplicationCredentialNotFound, + self.app_cred_api.get_application_credential, + app_cred['id'], + ) def test_delete_application_credential_not_found(self): - self.assertRaises(exception.ApplicationCredentialNotFound, - self.app_cred_api.delete_application_credential, - uuid.uuid4().hex) + self.assertRaises( + exception.ApplicationCredentialNotFound, + self.app_cred_api.delete_application_credential, + uuid.uuid4().hex, + ) def test_deleting_a_user_deletes_application_credentials(self): - app_cred_1 = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id'], - name='app1') - app_cred_2 = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id'], - name='app2') + app_cred_1 = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'], name='app1' + ) + app_cred_2 = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'], name='app2' + ) self.app_cred_api.create_application_credential(app_cred_1) self.app_cred_api.create_application_credential(app_cred_2) self.assertIn(app_cred_1['id'], self._list_ids(self.user_foo)) @@ -230,31 +259,39 @@ class ApplicationCredentialTests(object): PROVIDERS.identity_api.delete_user(self.user_foo['id']) hints = driver_hints.Hints() self.assertListEqual( - [], self.app_cred_api.list_application_credentials( - self.user_foo['id'], hints)) + [], + self.app_cred_api.list_application_credentials( + self.user_foo['id'], hints + ), + ) # the cache information has been invalidated. 
- self.assertRaises(exception.ApplicationCredentialNotFound, - self.app_cred_api.get_application_credential, - app_cred_1['id']) - self.assertRaises(exception.ApplicationCredentialNotFound, - self.app_cred_api.get_application_credential, - app_cred_2['id']) + self.assertRaises( + exception.ApplicationCredentialNotFound, + self.app_cred_api.get_application_credential, + app_cred_1['id'], + ) + self.assertRaises( + exception.ApplicationCredentialNotFound, + self.app_cred_api.get_application_credential, + app_cred_2['id'], + ) def test_removing_user_from_project_deletes_application_credentials(self): app_cred_proj_A_1 = self._new_app_cred_data( - self.user_foo['id'], project_id=self.project_bar['id'], - name='app1') + self.user_foo['id'], project_id=self.project_bar['id'], name='app1' + ) app_cred_proj_A_2 = self._new_app_cred_data( - self.user_foo['id'], project_id=self.project_bar['id'], - name='app2') + self.user_foo['id'], project_id=self.project_bar['id'], name='app2' + ) app_cred_proj_B = self._new_app_cred_data( - self.user_foo['id'], project_id=self.project_baz['id'], - name='app3') + self.user_foo['id'], project_id=self.project_baz['id'], name='app3' + ) PROVIDERS.assignment_api.add_role_to_user_and_project( project_id=self.project_baz['id'], user_id=self.user_foo['id'], - role_id=self.role__member_['id']) + role_id=self.role__member_['id'], + ) self.app_cred_api.create_application_credential(app_cred_proj_A_1) self.app_cred_api.create_application_credential(app_cred_proj_A_2) self.app_cred_api.create_application_credential(app_cred_proj_B) @@ -273,95 +310,140 @@ class ApplicationCredentialTests(object): PROVIDERS.assignment_api.remove_role_from_user_and_project( user_id=self.user_foo['id'], project_id=self.project_bar['id'], - role_id=self.role__member_['id']) - self.assertNotIn(app_cred_proj_A_1['id'], - self._list_ids(self.user_foo)) - self.assertNotIn(app_cred_proj_A_2['id'], - self._list_ids(self.user_foo)) + role_id=self.role__member_['id'], + ) + 
self.assertNotIn( + app_cred_proj_A_1['id'], self._list_ids(self.user_foo) + ) + self.assertNotIn( + app_cred_proj_A_2['id'], self._list_ids(self.user_foo) + ) self.assertIn(app_cred_proj_B['id'], self._list_ids(self.user_foo)) # the cache information has been invalidated only for the deleted # application credential. - self.assertRaises(exception.ApplicationCredentialNotFound, - self.app_cred_api.get_application_credential, - app_cred_proj_A_1['id']) - self.assertRaises(exception.ApplicationCredentialNotFound, - self.app_cred_api.get_application_credential, - app_cred_proj_A_2['id']) - self.assertEqual(app_cred_proj_B['id'], - self.app_cred_api.get_application_credential( - app_cred_proj_B['id'])['id']) + self.assertRaises( + exception.ApplicationCredentialNotFound, + self.app_cred_api.get_application_credential, + app_cred_proj_A_1['id'], + ) + self.assertRaises( + exception.ApplicationCredentialNotFound, + self.app_cred_api.get_application_credential, + app_cred_proj_A_2['id'], + ) + self.assertEqual( + app_cred_proj_B['id'], + self.app_cred_api.get_application_credential( + app_cred_proj_B['id'] + )['id'], + ) def test_authenticate(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) resp = self.app_cred_api.create_application_credential(app_cred) self.app_cred_api.authenticate(resp['id'], resp['secret']) def test_authenticate_not_found(self): - self.assertRaises(AssertionError, - self.app_cred_api.authenticate, - uuid.uuid4().hex, - uuid.uuid4().hex) + self.assertRaises( + AssertionError, + self.app_cred_api.authenticate, + uuid.uuid4().hex, + uuid.uuid4().hex, + ) def test_authenticate_expired(self): yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=1) - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id'], - expires=yesterday) + app_cred = 
self._new_app_cred_data( + self.user_foo['id'], + project_id=self.project_bar['id'], + expires=yesterday, + ) resp = self.app_cred_api.create_application_credential(app_cred) - self.assertRaises(AssertionError, - self.app_cred_api.authenticate, - resp['id'], - resp['secret']) + self.assertRaises( + AssertionError, + self.app_cred_api.authenticate, + resp['id'], + resp['secret'], + ) def test_authenticate_bad_secret(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) resp = self.app_cred_api.create_application_credential(app_cred) badpass = 'badpass' self.assertNotEqual(badpass, resp['secret']) - self.assertRaises(AssertionError, - self.app_cred_api.authenticate, - resp['id'], - badpass) + self.assertRaises( + AssertionError, self.app_cred_api.authenticate, resp['id'], badpass + ) def test_get_delete_access_rules(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) access_rule_id = uuid.uuid4().hex - app_cred['access_rules'] = [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + app_cred['access_rules'] = [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ] self.app_cred_api.create_application_credential(app_cred) - self.assertDictEqual(app_cred['access_rules'][0], - self.app_cred_api.get_access_rule(access_rule_id)) + self.assertDictEqual( + app_cred['access_rules'][0], + self.app_cred_api.get_access_rule(access_rule_id), + ) self.app_cred_api.delete_application_credential(app_cred['id']) self.app_cred_api.delete_access_rule(access_rule_id) - self.assertRaises(exception.AccessRuleNotFound, - 
self.app_cred_api.get_access_rule, - access_rule_id) + self.assertRaises( + exception.AccessRuleNotFound, + self.app_cred_api.get_access_rule, + access_rule_id, + ) def test_list_delete_access_rule_for_user(self): - app_cred = self._new_app_cred_data(self.user_foo['id'], - project_id=self.project_bar['id']) + app_cred = self._new_app_cred_data( + self.user_foo['id'], project_id=self.project_bar['id'] + ) access_rule_id = uuid.uuid4().hex - app_cred['access_rules'] = [{ - 'id': access_rule_id, - 'service': uuid.uuid4().hex, - 'path': uuid.uuid4().hex, - 'method': uuid.uuid4().hex[16:] - }] + app_cred['access_rules'] = [ + { + 'id': access_rule_id, + 'service': uuid.uuid4().hex, + 'path': uuid.uuid4().hex, + 'method': uuid.uuid4().hex[16:], + } + ] self.app_cred_api.create_application_credential(app_cred) - self.assertEqual(1, len(self.app_cred_api.list_access_rules_for_user( - self.user_foo['id']))) + self.assertEqual( + 1, + len( + self.app_cred_api.list_access_rules_for_user( + self.user_foo['id'] + ) + ), + ) self.app_cred_api.delete_application_credential(app_cred['id']) # access rule should still exist - self.assertEqual(1, len(self.app_cred_api.list_access_rules_for_user( - self.user_foo['id']))) + self.assertEqual( + 1, + len( + self.app_cred_api.list_access_rules_for_user( + self.user_foo['id'] + ) + ), + ) self.app_cred_api.delete_access_rules_for_user(self.user_foo['id']) - self.assertEqual(0, len(self.app_cred_api.list_access_rules_for_user( - self.user_foo['id']))) + self.assertEqual( + 0, + len( + self.app_cred_api.list_access_rules_for_user( + self.user_foo['id'] + ) + ), + ) diff --git a/keystone/tests/unit/assignment/role_backends/test_sql.py b/keystone/tests/unit/assignment/role_backends/test_sql.py index 4baf904a60..ba2ff61eb0 100644 --- a/keystone/tests/unit/assignment/role_backends/test_sql.py +++ b/keystone/tests/unit/assignment/role_backends/test_sql.py @@ -25,9 +25,11 @@ PROVIDERS = provider_api.ProviderAPIs class 
SqlRoleModels(core_sql.BaseBackendSqlModels): def test_role_model(self): - cols = (('id', sql.String, 64), - ('name', sql.String, 255), - ('domain_id', sql.String, 64)) + cols = ( + ('id', sql.String, 64), + ('name', sql.String, 255), + ('domain_id', sql.String, 64), + ) self.assertExpectedSchema('role', cols) @@ -35,24 +37,27 @@ class SqlRole(core_sql.BaseBackendSqlTests, test_core.RoleTests): def test_create_null_role_name(self): role = unit.new_role_ref(name=None) - self.assertRaises(exception.UnexpectedError, - PROVIDERS.role_api.create_role, - role['id'], - role) - self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.get_role, - role['id']) + self.assertRaises( + exception.UnexpectedError, + PROVIDERS.role_api.create_role, + role['id'], + role, + ) + self.assertRaises( + exception.RoleNotFound, PROVIDERS.role_api.get_role, role['id'] + ) def test_create_duplicate_role_domain_specific_name_fails(self): domain = unit.new_domain_ref() role1 = unit.new_role_ref(domain_id=domain['id']) PROVIDERS.role_api.create_role(role1['id'], role1) - role2 = unit.new_role_ref(name=role1['name'], - domain_id=domain['id']) - self.assertRaises(exception.Conflict, - PROVIDERS.role_api.create_role, - role2['id'], - role2) + role2 = unit.new_role_ref(name=role1['name'], domain_id=domain['id']) + self.assertRaises( + exception.Conflict, + PROVIDERS.role_api.create_role, + role2['id'], + role2, + ) def test_update_domain_id_of_role_fails(self): # Create a global role @@ -61,10 +66,12 @@ class SqlRole(core_sql.BaseBackendSqlTests, test_core.RoleTests): # Try and update it to be domain specific domainA = unit.new_domain_ref() role1['domain_id'] = domainA['id'] - self.assertRaises(exception.ValidationError, - PROVIDERS.role_api.update_role, - role1['id'], - role1) + self.assertRaises( + exception.ValidationError, + PROVIDERS.role_api.update_role, + role1['id'], + role1, + ) # Create a domain specific role from scratch role2 = unit.new_role_ref(domain_id=domainA['id']) @@ -72,16 
+79,20 @@ class SqlRole(core_sql.BaseBackendSqlTests, test_core.RoleTests): # Try to "move" it to another domain domainB = unit.new_domain_ref() role2['domain_id'] = domainB['id'] - self.assertRaises(exception.ValidationError, - PROVIDERS.role_api.update_role, - role2['id'], - role2) + self.assertRaises( + exception.ValidationError, + PROVIDERS.role_api.update_role, + role2['id'], + role2, + ) # Now try to make it global role2['domain_id'] = None - self.assertRaises(exception.ValidationError, - PROVIDERS.role_api.update_role, - role2['id'], - role2) + self.assertRaises( + exception.ValidationError, + PROVIDERS.role_api.update_role, + role2['id'], + role2, + ) def test_domain_specific_separation(self): domain1 = unit.new_domain_ref() @@ -108,8 +119,8 @@ class SqlRole(core_sql.BaseBackendSqlTests, test_core.RoleTests): self.assertDictEqual(role3, role_ref3) # Check that deleting one of these, doesn't affect the others PROVIDERS.role_api.delete_role(role1['id']) - self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.get_role, - role1['id']) + self.assertRaises( + exception.RoleNotFound, PROVIDERS.role_api.get_role, role1['id'] + ) PROVIDERS.role_api.get_role(role2['id']) PROVIDERS.role_api.get_role(role3['id']) diff --git a/keystone/tests/unit/assignment/test_backends.py b/keystone/tests/unit/assignment/test_backends.py index 119bcf7ab3..7893de1c96 100644 --- a/keystone/tests/unit/assignment/test_backends.py +++ b/keystone/tests/unit/assignment/test_backends.py @@ -137,8 +137,9 @@ class AssignmentTestHelperMixin(object): """ - def _handle_project_spec(self, test_data, domain_id, project_spec, - parent_id=None): + def _handle_project_spec( + self, test_data, domain_id, project_spec, parent_id=None + ): """Handle the creation of a project or hierarchy of projects. project_spec may either be a count of the number of projects to @@ -150,9 +151,11 @@ class AssignmentTestHelperMixin(object): hierarchy of projects. 
""" + def _create_project(domain_id, parent_id): - new_project = unit.new_project_ref(domain_id=domain_id, - parent_id=parent_id) + new_project = unit.new_project_ref( + domain_id=domain_id, parent_id=parent_id + ) new_project = PROVIDERS.resource_api.create_project( new_project['id'], new_project ) @@ -161,17 +164,22 @@ class AssignmentTestHelperMixin(object): if isinstance(project_spec, list): for this_spec in project_spec: self._handle_project_spec( - test_data, domain_id, this_spec, parent_id=parent_id) + test_data, domain_id, this_spec, parent_id=parent_id + ) elif isinstance(project_spec, dict): new_proj = _create_project(domain_id, parent_id) test_data['projects'].append(new_proj) self._handle_project_spec( - test_data, domain_id, project_spec['project'], - parent_id=new_proj['id']) + test_data, + domain_id, + project_spec['project'], + parent_id=new_proj['id'], + ) else: for _ in range(project_spec): test_data['projects'].append( - _create_project(domain_id, parent_id)) + _create_project(domain_id, parent_id) + ) def _create_role(self, domain_id=None): new_role = unit.new_role_ref(domain_id=domain_id) @@ -190,6 +198,7 @@ class AssignmentTestHelperMixin(object): This method will insert any entities created into test_data """ + def _create_domain(domain_id=None): if domain_id is None: new_domain = unit.new_domain_ref() @@ -232,13 +241,16 @@ class AssignmentTestHelperMixin(object): # If it's projects, we need to handle the potential # specification of a project hierarchy self._handle_project_spec( - test_data, the_domain['id'], value) + test_data, the_domain['id'], value + ) else: # It's a count of number of entities for _ in range(value): test_data[entity_type].append( _create_entity_in_domain( - entity_type, the_domain['id'])) + entity_type, the_domain['id'] + ) + ) else: for _ in range(domain_spec): test_data['domains'].append(_create_domain()) @@ -286,8 +298,9 @@ class AssignmentTestHelperMixin(object): """ expanded_key = '%s_id' % key reference_index = 
'%ss' % key - index_value = ( - reference_data[reference_index][shorthand_data[key]]['id']) + index_value = reference_data[reference_index][shorthand_data[key]][ + 'id' + ] return expanded_key, index_value def create_implied_roles(self, implied_pattern, test_data): @@ -305,8 +318,9 @@ class AssignmentTestHelperMixin(object): prior_role, implied_role ) else: - implied_role = ( - test_data['roles'][implied_spec['implied_roles']]['id']) + implied_role = test_data['roles'][ + implied_spec['implied_roles'] + ]['id'] PROVIDERS.role_api.create_implied_role( prior_role, implied_role ) @@ -333,8 +347,9 @@ class AssignmentTestHelperMixin(object): # First store how many assignments are already in the system, # so during the tests we can check the number of new assignments # created. - test_data['initial_assignment_count'] = ( - len(PROVIDERS.assignment_api.list_role_assignments())) + test_data['initial_assignment_count'] = len( + PROVIDERS.assignment_api.list_role_assignments() + ) # Now create the new assignments in the test plan for assignment in assignment_pattern: @@ -355,13 +370,15 @@ class AssignmentTestHelperMixin(object): # Turn 'entity : 0' into 'entity_id = ac6736ba873d' # where entity in user, group, project or domain key, value = self._convert_entity_shorthand( - param, assignment, test_data) + param, assignment, test_data + ) args[key] = value PROVIDERS.assignment_api.create_grant(**args) return test_data def execute_assignment_cases(self, test_plan, test_data): """Execute the test plan, based on the created test_data.""" + def check_results(expected, actual, param_arg_count): if param_arg_count == 0: # It was an unfiltered call, so default fixture assignments @@ -369,7 +386,8 @@ class AssignmentTestHelperMixin(object): # how many assignments there were before the test. 
self.assertEqual( len(expected) + test_data['initial_assignment_count'], - len(actual)) + len(actual), + ) else: self.assertThat(actual, matchers.HasLength(len(expected))) @@ -386,23 +404,23 @@ class AssignmentTestHelperMixin(object): indirect_term = {} for indirect_param in each_expected[param]: key, value = self._convert_entity_shorthand( - indirect_param, each_expected[param], - test_data) + indirect_param, each_expected[param], test_data + ) indirect_term[key] = value expected_assignment[param] = indirect_term else: # Convert a simple shorthand entry into a full # entity reference key, value = self._convert_entity_shorthand( - param, each_expected, test_data) + param, each_expected, test_data + ) expected_assignment[key] = value self.assertIn(expected_assignment, actual) def convert_group_ids_sourced_from_list(index_list, reference_data): value_list = [] for group_index in index_list: - value_list.append( - reference_data['groups'][group_index]['id']) + value_list.append(reference_data['groups'][group_index]['id']) return value_list # Go through each test in the array, processing the input params, which @@ -417,12 +435,14 @@ class AssignmentTestHelperMixin(object): elif param == 'source_from_group_ids': # Convert the list of indexes into a list of IDs args[param] = convert_group_ids_sourced_from_list( - test['params']['source_from_group_ids'], test_data) + test['params']['source_from_group_ids'], test_data + ) else: # Turn 'entity : 0' into 'entity_id = ac6736ba873d' # where entity in user, group, project or domain key, value = self._convert_entity_shorthand( - param, test['params'], test_data) + param, test['params'], test_data + ) args[key] = value results = PROVIDERS.assignment_api.list_role_assignments(**args) check_results(test['results'], results, len(args)) @@ -440,11 +460,13 @@ class AssignmentTestHelperMixin(object): if 'implied_roles' in test_plan: self.create_implied_roles(test_plan['implied_roles'], test_data) if 'group_memberships' in test_plan: - 
self.create_group_memberships(test_plan['group_memberships'], - test_data) + self.create_group_memberships( + test_plan['group_memberships'], test_data + ) if 'assignments' in test_plan: - test_data = self.create_assignments(test_plan['assignments'], - test_data) + test_data = self.create_assignments( + test_plan['assignments'], test_data + ) self.execute_assignment_cases(test_plan, test_data) return test_data @@ -458,39 +480,46 @@ class AssignmentTests(AssignmentTestHelperMixin): def test_project_add_and_remove_user_role(self): user_ids = PROVIDERS.assignment_api.list_user_ids_for_project( - self.project_bar['id']) + self.project_bar['id'] + ) self.assertNotIn(self.user_two['id'], user_ids) PROVIDERS.assignment_api.add_role_to_user_and_project( project_id=self.project_bar['id'], user_id=self.user_two['id'], - role_id=self.role_other['id']) + role_id=self.role_other['id'], + ) user_ids = PROVIDERS.assignment_api.list_user_ids_for_project( - self.project_bar['id']) + self.project_bar['id'] + ) self.assertIn(self.user_two['id'], user_ids) PROVIDERS.assignment_api.remove_role_from_user_and_project( project_id=self.project_bar['id'], user_id=self.user_two['id'], - role_id=self.role_other['id']) + role_id=self.role_other['id'], + ) user_ids = PROVIDERS.assignment_api.list_user_ids_for_project( - self.project_bar['id']) + self.project_bar['id'] + ) self.assertNotIn(self.user_two['id'], user_ids) def test_remove_user_role_not_assigned(self): # Expect failure if attempt to remove a role that was never assigned to # the user. - self.assertRaises(exception.RoleNotFound, - PROVIDERS.assignment_api. 
- remove_role_from_user_and_project, - project_id=self.project_bar['id'], - user_id=self.user_two['id'], - role_id=self.role_other['id']) + self.assertRaises( + exception.RoleNotFound, + PROVIDERS.assignment_api.remove_role_from_user_and_project, + project_id=self.project_bar['id'], + user_id=self.user_two['id'], + role_id=self.role_other['id'], + ) def test_list_user_ids_for_project(self): user_ids = PROVIDERS.assignment_api.list_user_ids_for_project( - self.project_baz['id']) + self.project_baz['id'] + ) self.assertEqual(2, len(user_ids)) self.assertIn(self.user_two['id'], user_ids) self.assertIn(self.user_badguy['id'], user_ids) @@ -501,9 +530,9 @@ class AssignmentTests(AssignmentTestHelperMixin): user_ref = PROVIDERS.identity_api.create_user(user_ref) # Create project project_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - PROVIDERS.resource_api.create_project( - project_ref['id'], project_ref) + domain_id=CONF.identity.default_domain_id + ) + PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) # Create 2 roles and give user each role in project for i in range(2): role_ref = unit.new_role_ref() @@ -511,37 +540,49 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.add_role_to_user_and_project( user_id=user_ref['id'], project_id=project_ref['id'], - role_id=role_ref['id']) + role_id=role_ref['id'], + ) # Get the list of user_ids in project user_ids = PROVIDERS.assignment_api.list_user_ids_for_project( - project_ref['id']) + project_ref['id'] + ) # Ensure the user is only returned once self.assertEqual(1, len(user_ids)) def test_get_project_user_ids_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.assignment_api.list_user_ids_for_project, - uuid.uuid4().hex) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.assignment_api.list_user_ids_for_project, + uuid.uuid4().hex, + ) def test_list_role_assignments_unfiltered(self): """Test unfiltered listing 
of role assignments.""" test_plan = { # Create a domain, with a user, group & project - 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1}, - 'roles': 3}, + 'entities': { + 'domains': {'users': 1, 'groups': 1, 'projects': 1}, + 'roles': 3, + }, # Create a grant of each type (user/group on project/domain) - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}], + 'assignments': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'user': 0, 'role': 1, 'project': 0}, + {'group': 0, 'role': 2, 'domain': 0}, + {'group': 0, 'role': 2, 'project': 0}, + ], 'tests': [ # Check that we get back the 4 assignments - {'params': {}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}]} - ] + { + 'params': {}, + 'results': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'user': 0, 'role': 1, 'project': 0}, + {'group': 0, 'role': 2, 'domain': 0}, + {'group': 0, 'role': 2, 'project': 0}, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -549,21 +590,33 @@ class AssignmentTests(AssignmentTestHelperMixin): """Test listing of role assignments filtered by role ID.""" test_plan = { # Create a user, group & project in the default domain - 'entities': {'domains': {'id': CONF.identity.default_domain_id, - 'users': 1, 'groups': 1, 'projects': 1}, - 'roles': 3}, + 'entities': { + 'domains': { + 'id': CONF.identity.default_domain_id, + 'users': 1, + 'groups': 1, + 'projects': 1, + }, + 'roles': 3, + }, # Create a grant of each type (user/group on project/domain) - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}], + 'assignments': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'user': 0, 'role': 1, 'project': 0}, + {'group': 0, 
'role': 2, 'domain': 0}, + {'group': 0, 'role': 2, 'project': 0}, + ], 'tests': [ # Check that when filtering by role, we only get back those # that match - {'params': {'role': 2}, - 'results': [{'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}]} - ] + { + 'params': {'role': 2}, + 'results': [ + {'group': 0, 'role': 2, 'domain': 0}, + {'group': 0, 'role': 2, 'project': 0}, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -572,20 +625,28 @@ class AssignmentTests(AssignmentTestHelperMixin): # listed then the group role assignment is included in the list. test_plan = { - 'entities': {'domains': {'id': CONF.identity.default_domain_id, - 'groups': 1, 'projects': 1}, - 'roles': 1}, + 'entities': { + 'domains': { + 'id': CONF.identity.default_domain_id, + 'groups': 1, + 'projects': 1, + }, + 'roles': 1, + }, 'assignments': [{'group': 0, 'role': 0, 'project': 0}], 'tests': [ - {'params': {}, - 'results': [{'group': 0, 'role': 0, 'project': 0}]} - ] + { + 'params': {}, + 'results': [{'group': 0, 'role': 0, 'project': 0}], + } + ], } self.execute_assignment_plan(test_plan) def test_list_role_assignments_bad_role(self): assignment_list = PROVIDERS.assignment_api.list_role_assignments( - role_id=uuid.uuid4().hex) + role_id=uuid.uuid4().hex + ) self.assertEqual([], assignment_list) def test_list_role_assignments_user_not_found(self): @@ -596,8 +657,9 @@ class AssignmentTests(AssignmentTestHelperMixin): # this simulates the possibility of a user being deleted # directly in the backend and still having lingering role # assignments. 
- with mock.patch.object(PROVIDERS.identity_api, 'get_user', - _user_not_found): + with mock.patch.object( + PROVIDERS.identity_api, 'get_user', _user_not_found + ): assignment_list = PROVIDERS.assignment_api.list_role_assignments( include_names=True ) @@ -622,18 +684,22 @@ class AssignmentTests(AssignmentTestHelperMixin): # 2) create a group and 2 users in that group domain_id = CONF.identity.default_domain_id group = PROVIDERS.identity_api.create_group( - unit.new_group_ref(domain_id=domain_id)) + unit.new_group_ref(domain_id=domain_id) + ) user1 = PROVIDERS.identity_api.create_user( - unit.new_user_ref(domain_id=domain_id)) + unit.new_user_ref(domain_id=domain_id) + ) user2 = PROVIDERS.identity_api.create_user( - unit.new_user_ref(domain_id=domain_id)) + unit.new_user_ref(domain_id=domain_id) + ) PROVIDERS.identity_api.add_user_to_group(user1['id'], group['id']) PROVIDERS.identity_api.add_user_to_group(user2['id'], group['id']) # 3) create a role assignment for the group PROVIDERS.assignment_api.create_grant( group_id=group['id'], domain_id=domain_id, - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) num_assignments = len(PROVIDERS.assignment_api.list_role_assignments()) self.assertEqual(1, num_assignments) @@ -641,8 +707,9 @@ class AssignmentTests(AssignmentTestHelperMixin): # Patch get_group to return GroupNotFound, allowing us to confirm # that the exception is handled properly when include_names processing # attempts to lookup a group that has been deleted in the backend - with mock.patch.object(PROVIDERS.identity_api, 'get_group', - _group_not_found): + with mock.patch.object( + PROVIDERS.identity_api, 'get_group', _group_not_found + ): # Mocking a dependent function makes the cache invalid keystone.assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() @@ -662,16 +729,18 @@ class AssignmentTests(AssignmentTestHelperMixin): self.assertEqual('', assignment['group_domain_name']) self.assertTrue(includes_group_assignments) 
- num_effective = len(PROVIDERS.assignment_api.list_role_assignments( - effective=True)) + num_effective = len( + PROVIDERS.assignment_api.list_role_assignments(effective=True) + ) self.assertGreater(num_effective, len(assignment_list)) # Patch list_users_in_group to return GroupNotFound allowing us to # confirm that the exception is handled properly when effective # processing attempts to lookup users for a group that has been deleted # in the backend - with mock.patch.object(PROVIDERS.identity_api, 'list_users_in_group', - _group_not_found): + with mock.patch.object( + PROVIDERS.identity_api, 'list_users_in_group', _group_not_found + ): # Mocking a dependent function makes the cache invalid keystone.assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() @@ -685,22 +754,25 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.delete_grant( group_id=group['id'], domain_id=domain_id, - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) # TODO(edmondsw) should cleanup users/groups as well, but that raises # LDAP read-only issues def test_add_duplicate_role_grant(self): roles_ref = PROVIDERS.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.project_bar['id']) + self.user_foo['id'], self.project_bar['id'] + ) self.assertNotIn(self.role_admin['id'], roles_ref) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.project_bar['id'], self.role_admin['id']) + self.user_foo['id'], self.project_bar['id'], self.role_admin['id'] + ) self.assertRaises( exception.Conflict, PROVIDERS.assignment_api.add_role_to_user_and_project, self.user_foo['id'], self.project_bar['id'], - self.role_admin['id'] + self.role_admin['id'], ) def test_get_role_by_user_and_project_with_user_in_group(self): @@ -717,7 +789,8 @@ class AssignmentTests(AssignmentTestHelperMixin): user_ref = PROVIDERS.identity_api.create_user(user_ref) project_ref = unit.new_project_ref( - 
domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) @@ -733,32 +806,37 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.add_role_to_user_and_project( user_id=user_ref['id'], project_id=project_ref['id'], - role_id=role_ref['id']) + role_id=role_ref['id'], + ) role_list = PROVIDERS.assignment_api.get_roles_for_user_and_project( - user_ref['id'], - project_ref['id']) + user_ref['id'], project_ref['id'] + ) - self.assertEqual(set([r['id'] for r in role_ref_list]), - set(role_list)) + self.assertEqual(set([r['id'] for r in role_ref_list]), set(role_list)) def test_get_role_by_user_and_project(self): roles_ref = PROVIDERS.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.project_bar['id']) + self.user_foo['id'], self.project_bar['id'] + ) self.assertNotIn(self.role_admin['id'], roles_ref) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.project_bar['id'], self.role_admin['id']) + self.user_foo['id'], self.project_bar['id'], self.role_admin['id'] + ) roles_ref = PROVIDERS.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.project_bar['id']) + self.user_foo['id'], self.project_bar['id'] + ) self.assertIn(self.role_admin['id'], roles_ref) self.assertNotIn(default_fixtures.MEMBER_ROLE_ID, roles_ref) PROVIDERS.assignment_api.add_role_to_user_and_project( self.user_foo['id'], self.project_bar['id'], - default_fixtures.MEMBER_ROLE_ID) + default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.project_bar['id']) + self.user_foo['id'], self.project_bar['id'] + ) self.assertIn(self.role_admin['id'], roles_ref) self.assertIn(default_fixtures.MEMBER_ROLE_ID, roles_ref) @@ -775,15 +853,18 @@ class 
AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.create_grant( user_id=new_user['id'], project_id=new_project['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) PROVIDERS.assignment_api.create_grant( user_id=new_user['id'], domain_id=new_domain['id'], role_id=role['id'], - inherited_to_projects=True) + inherited_to_projects=True, + ) roles_ids = PROVIDERS.assignment_api.get_roles_for_trustor_and_project( - new_user['id'], new_project['id']) + new_user['id'], new_project['id'] + ) self.assertEqual(2, len(roles_ids)) self.assertIn(self.role_member['id'], roles_ids) self.assertIn(role['id'], roles_ids) @@ -809,25 +890,29 @@ class AssignmentTests(AssignmentTestHelperMixin): new_user2 = unit.new_user_ref(domain_id=new_domain['id']) new_user2 = PROVIDERS.identity_api.create_user(new_user2) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=new_user1['id'], - domain_id=new_domain['id']) + user_id=new_user1['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) # Now create the grants (roles are defined in default_fixtures) PROVIDERS.assignment_api.create_grant( user_id=new_user1['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) PROVIDERS.assignment_api.create_grant( user_id=new_user1['id'], domain_id=new_domain['id'], - role_id=default_fixtures.OTHER_ROLE_ID) + role_id=default_fixtures.OTHER_ROLE_ID, + ) PROVIDERS.assignment_api.create_grant( user_id=new_user2['id'], domain_id=new_domain['id'], - role_id=default_fixtures.ADMIN_ROLE_ID) + role_id=default_fixtures.ADMIN_ROLE_ID, + ) # Read back the roles for user1 on domain roles_ids = PROVIDERS.assignment_api.get_roles_for_user_and_domain( - new_user1['id'], new_domain['id']) + new_user1['id'], new_domain['id'] + ) self.assertEqual(2, len(roles_ids)) self.assertIn(self.role_member['id'], roles_ids) self.assertIn(self.role_other['id'], roles_ids) @@ -836,14 
+921,16 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.delete_grant( user_id=new_user1['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) PROVIDERS.assignment_api.delete_grant( user_id=new_user1['id'], domain_id=new_domain['id'], - role_id=default_fixtures.OTHER_ROLE_ID) + role_id=default_fixtures.OTHER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=new_user1['id'], - domain_id=new_domain['id']) + user_id=new_user1['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) def test_get_roles_for_user_and_domain_returns_not_found(self): @@ -863,14 +950,14 @@ class AssignmentTests(AssignmentTestHelperMixin): exception.UserNotFound, PROVIDERS.assignment_api.get_roles_for_user_and_domain, uuid.uuid4().hex, - new_domain['id'] + new_domain['id'], ) self.assertRaises( exception.DomainNotFound, PROVIDERS.assignment_api.get_roles_for_user_and_domain, new_user1['id'], - uuid.uuid4().hex + uuid.uuid4().hex, ) def test_get_roles_for_user_and_project_returns_not_found(self): @@ -878,14 +965,14 @@ class AssignmentTests(AssignmentTestHelperMixin): exception.UserNotFound, PROVIDERS.assignment_api.get_roles_for_user_and_project, uuid.uuid4().hex, - self.project_bar['id'] + self.project_bar['id'], ) self.assertRaises( exception.ProjectNotFound, PROVIDERS.assignment_api.get_roles_for_user_and_project, self.user_foo['id'], - uuid.uuid4().hex + uuid.uuid4().hex, ) def test_add_role_to_user_and_project_returns_not_found(self): @@ -894,7 +981,7 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.add_role_to_user_and_project, self.user_foo['id'], uuid.uuid4().hex, - self.role_admin['id'] + self.role_admin['id'], ) self.assertRaises( @@ -902,7 +989,7 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.add_role_to_user_and_project, self.user_foo['id'], self.project_bar['id'], - uuid.uuid4().hex + 
uuid.uuid4().hex, ) def test_add_role_to_user_and_project_no_user(self): @@ -910,49 +997,57 @@ class AssignmentTests(AssignmentTestHelperMixin): # no error. user_id_not_exist = uuid.uuid4().hex PROVIDERS.assignment_api.add_role_to_user_and_project( - user_id_not_exist, self.project_bar['id'], self.role_admin['id']) + user_id_not_exist, self.project_bar['id'], self.role_admin['id'] + ) def test_remove_role_from_user_and_project(self): PROVIDERS.assignment_api.add_role_to_user_and_project( self.user_foo['id'], self.project_bar['id'], - default_fixtures.MEMBER_ROLE_ID) + default_fixtures.MEMBER_ROLE_ID, + ) PROVIDERS.assignment_api.remove_role_from_user_and_project( self.user_foo['id'], self.project_bar['id'], - default_fixtures.MEMBER_ROLE_ID) + default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.project_bar['id']) + self.user_foo['id'], self.project_bar['id'] + ) self.assertNotIn(default_fixtures.MEMBER_ROLE_ID, roles_ref) - self.assertRaises(exception.NotFound, - PROVIDERS.assignment_api. 
- remove_role_from_user_and_project, - self.user_foo['id'], - self.project_bar['id'], - default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.NotFound, + PROVIDERS.assignment_api.remove_role_from_user_and_project, + self.user_foo['id'], + self.project_bar['id'], + default_fixtures.MEMBER_ROLE_ID, + ) def test_get_role_grant_by_user_and_project(self): roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.project_bar['id']) + user_id=self.user_foo['id'], project_id=self.project_bar['id'] + ) self.assertEqual(1, len(roles_ref)) PROVIDERS.assignment_api.create_grant( - user_id=self.user_foo['id'], project_id=self.project_bar['id'], - role_id=self.role_admin['id'] + user_id=self.user_foo['id'], + project_id=self.project_bar['id'], + role_id=self.role_admin['id'], ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.project_bar['id']) - self.assertIn(self.role_admin['id'], - [role_ref['id'] for role_ref in roles_ref]) + user_id=self.user_foo['id'], project_id=self.project_bar['id'] + ) + self.assertIn( + self.role_admin['id'], [role_ref['id'] for role_ref in roles_ref] + ) PROVIDERS.assignment_api.create_grant( user_id=self.user_foo['id'], project_id=self.project_bar['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.project_bar['id']) + user_id=self.user_foo['id'], project_id=self.project_bar['id'] + ) roles_ref_ids = [] for ref in roles_ref: @@ -964,77 +1059,97 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.create_grant( user_id=self.user_foo['id'], project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.project_baz['id']) + 
user_id=self.user_foo['id'], project_id=self.project_baz['id'] + ) self.assertDictEqual(self.role_member, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( user_id=self.user_foo['id'], project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.project_baz['id']) + user_id=self.user_foo['id'], project_id=self.project_baz['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - user_id=self.user_foo['id'], - project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + user_id=self.user_foo['id'], + project_id=self.project_baz['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_get_role_assignment_by_project_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.check_grant_role_id, - user_id=self.user_foo['id'], - project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.check_grant_role_id, + user_id=self.user_foo['id'], + project_id=self.project_baz['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.check_grant_role_id, - group_id=uuid.uuid4().hex, - project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.check_grant_role_id, + group_id=uuid.uuid4().hex, + project_id=self.project_baz['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_get_role_assignment_by_domain_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - 
PROVIDERS.assignment_api.check_grant_role_id, - user_id=self.user_foo['id'], - domain_id=CONF.identity.default_domain_id, - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.check_grant_role_id, + user_id=self.user_foo['id'], + domain_id=CONF.identity.default_domain_id, + role_id=default_fixtures.MEMBER_ROLE_ID, + ) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.check_grant_role_id, - group_id=uuid.uuid4().hex, - domain_id=CONF.identity.default_domain_id, - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.check_grant_role_id, + group_id=uuid.uuid4().hex, + domain_id=CONF.identity.default_domain_id, + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_del_role_assignment_by_project_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - user_id=self.user_foo['id'], - project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + user_id=self.user_foo['id'], + project_id=self.project_baz['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - group_id=uuid.uuid4().hex, - project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + group_id=uuid.uuid4().hex, + project_id=self.project_baz['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_del_role_assignment_by_domain_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - user_id=self.user_foo['id'], - domain_id=CONF.identity.default_domain_id, - role_id=default_fixtures.MEMBER_ROLE_ID) 
+ self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + user_id=self.user_foo['id'], + domain_id=CONF.identity.default_domain_id, + role_id=default_fixtures.MEMBER_ROLE_ID, + ) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - group_id=uuid.uuid4().hex, - domain_id=CONF.identity.default_domain_id, - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + group_id=uuid.uuid4().hex, + domain_id=CONF.identity.default_domain_id, + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_get_and_remove_role_grant_by_group_and_project(self): new_domain = unit.new_domain_ref() @@ -1047,31 +1162,35 @@ class AssignmentTests(AssignmentTestHelperMixin): new_user['id'], new_group['id'] ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.project_bar['id']) + group_id=new_group['id'], project_id=self.project_bar['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( group_id=new_group['id'], project_id=self.project_bar['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.project_bar['id']) + group_id=new_group['id'], project_id=self.project_bar['id'] + ) self.assertDictEqual(self.role_member, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( group_id=new_group['id'], project_id=self.project_bar['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.project_bar['id']) + group_id=new_group['id'], project_id=self.project_bar['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - 
PROVIDERS.assignment_api.delete_grant, - group_id=new_group['id'], - project_id=self.project_bar['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + group_id=new_group['id'], + project_id=self.project_bar['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_get_and_remove_role_grant_by_group_and_domain(self): new_domain = unit.new_domain_ref() @@ -1085,33 +1204,37 @@ class AssignmentTests(AssignmentTestHelperMixin): ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( group_id=new_group['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertDictEqual(self.role_member, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( group_id=new_group['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - group_id=new_group['id'], - domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + group_id=new_group['id'], + domain_id=new_domain['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_get_and_remove_correct_role_grant_from_a_mix(self): new_domain = 
unit.new_domain_ref() @@ -1131,47 +1254,54 @@ class AssignmentTests(AssignmentTestHelperMixin): ) # First check we have no grants roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) # Now add the grant we are going to test for, and some others as # well just to make sure we get back the right one PROVIDERS.assignment_api.create_grant( group_id=new_group['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) PROVIDERS.assignment_api.create_grant( - group_id=new_group2['id'], domain_id=new_domain['id'], - role_id=self.role_admin['id'] + group_id=new_group2['id'], + domain_id=new_domain['id'], + role_id=self.role_admin['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=new_user2['id'], domain_id=new_domain['id'], - role_id=self.role_admin['id'] + user_id=new_user2['id'], + domain_id=new_domain['id'], + role_id=self.role_admin['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=new_group['id'], project_id=new_project['id'], - role_id=self.role_admin['id'] + group_id=new_group['id'], + project_id=new_project['id'], + role_id=self.role_admin['id'], ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertDictEqual(self.role_member, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( group_id=new_group['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - 
group_id=new_group['id'], - domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + group_id=new_group['id'], + domain_id=new_domain['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_get_and_remove_role_grant_by_user_and_domain(self): new_domain = unit.new_domain_ref() @@ -1179,31 +1309,35 @@ class AssignmentTests(AssignmentTestHelperMixin): new_user = unit.new_user_ref(domain_id=new_domain['id']) new_user = PROVIDERS.identity_api.create_user(new_user) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=new_user['id'], - domain_id=new_domain['id']) + user_id=new_user['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( user_id=new_user['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=new_user['id'], - domain_id=new_domain['id']) + user_id=new_user['id'], domain_id=new_domain['id'] + ) self.assertDictEqual(self.role_member, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( user_id=new_user['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=new_user['id'], - domain_id=new_domain['id']) + user_id=new_user['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - user_id=new_user['id'], - domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + user_id=new_user['id'], + domain_id=new_domain['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def 
test_get_and_remove_role_grant_by_group_and_cross_domain(self): group1_domain1_role = unit.new_role_ref() @@ -1221,43 +1355,48 @@ class AssignmentTests(AssignmentTestHelperMixin): group1 = unit.new_group_ref(domain_id=domain1['id']) group1 = PROVIDERS.identity_api.create_group(group1) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) + group_id=group1['id'], domain_id=domain1['id'] + ) self.assertEqual(0, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) + group_id=group1['id'], domain_id=domain2['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], domain_id=domain1['id'], - role_id=group1_domain1_role['id'] + group_id=group1['id'], + domain_id=domain1['id'], + role_id=group1_domain1_role['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], domain_id=domain2['id'], - role_id=group1_domain2_role['id'] + group_id=group1['id'], + domain_id=domain2['id'], + role_id=group1_domain2_role['id'], ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) + group_id=group1['id'], domain_id=domain1['id'] + ) self.assertDictEqual(group1_domain1_role, roles_ref[0]) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) + group_id=group1['id'], domain_id=domain2['id'] + ) self.assertDictEqual(group1_domain2_role, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( - group_id=group1['id'], domain_id=domain2['id'], - role_id=group1_domain2_role['id'] + group_id=group1['id'], + domain_id=domain2['id'], + role_id=group1_domain2_role['id'], ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) + group_id=group1['id'], domain_id=domain2['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - 
PROVIDERS.assignment_api.delete_grant, - group_id=group1['id'], - domain_id=domain2['id'], - role_id=group1_domain2_role['id']) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + group_id=group1['id'], + domain_id=domain2['id'], + role_id=group1_domain2_role['id'], + ) def test_get_and_remove_role_grant_by_user_and_cross_domain(self): user1_domain1_role = unit.new_role_ref() @@ -1275,43 +1414,48 @@ class AssignmentTests(AssignmentTestHelperMixin): user1 = unit.new_user_ref(domain_id=domain1['id']) user1 = PROVIDERS.identity_api.create_user(user1) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) + user_id=user1['id'], domain_id=domain1['id'] + ) self.assertEqual(0, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) + user_id=user1['id'], domain_id=domain2['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain1['id'], - role_id=user1_domain1_role['id'] + user_id=user1['id'], + domain_id=domain1['id'], + role_id=user1_domain1_role['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain2['id'], - role_id=user1_domain2_role['id'] + user_id=user1['id'], + domain_id=domain2['id'], + role_id=user1_domain2_role['id'], ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) + user_id=user1['id'], domain_id=domain1['id'] + ) self.assertDictEqual(user1_domain1_role, roles_ref[0]) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) + user_id=user1['id'], domain_id=domain2['id'] + ) self.assertDictEqual(user1_domain2_role, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( - user_id=user1['id'], domain_id=domain2['id'], - role_id=user1_domain2_role['id'] + user_id=user1['id'], + domain_id=domain2['id'], + 
role_id=user1_domain2_role['id'], ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) + user_id=user1['id'], domain_id=domain2['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - user_id=user1['id'], - domain_id=domain2['id'], - role_id=user1_domain2_role['id']) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + user_id=user1['id'], + domain_id=domain2['id'], + role_id=user1_domain2_role['id'], + ) def test_role_grant_by_group_and_cross_domain_project(self): role1 = unit.new_role_ref() @@ -1327,20 +1471,22 @@ class AssignmentTests(AssignmentTestHelperMixin): project1 = unit.new_project_ref(domain_id=domain2['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) + group_id=group1['id'], project_id=project1['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=role1['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=role1['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=role2['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=role2['id'], ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) + group_id=group1['id'], project_id=project1['id'] + ) roles_ref_ids = [] for ref in roles_ref: @@ -1349,12 +1495,13 @@ class AssignmentTests(AssignmentTestHelperMixin): self.assertIn(role2['id'], roles_ref_ids) PROVIDERS.assignment_api.delete_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=role1['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=role1['id'], ) roles_ref = 
PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) + group_id=group1['id'], project_id=project1['id'] + ) self.assertEqual(1, len(roles_ref)) self.assertDictEqual(role2, roles_ref[0]) @@ -1372,8 +1519,8 @@ class AssignmentTests(AssignmentTestHelperMixin): project1 = unit.new_project_ref(domain_id=domain2['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( user_id=user1['id'], project_id=project1['id'], role_id=role1['id'] @@ -1382,8 +1529,8 @@ class AssignmentTests(AssignmentTestHelperMixin): user_id=user1['id'], project_id=project1['id'], role_id=role2['id'] ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) roles_ref_ids = [] for ref in roles_ref: @@ -1395,8 +1542,8 @@ class AssignmentTests(AssignmentTestHelperMixin): user_id=user1['id'], project_id=project1['id'], role_id=role1['id'] ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(1, len(roles_ref)) self.assertDictEqual(role2, roles_ref[0]) @@ -1434,51 +1581,68 @@ class AssignmentTests(AssignmentTestHelperMixin): def test_grant_crud_throws_exception_if_invalid_role(self): """Ensure RoleNotFound thrown if role does not exist.""" + def assert_role_not_found_exception(f, **kwargs): - self.assertRaises(exception.RoleNotFound, f, - role_id=uuid.uuid4().hex, **kwargs) + self.assertRaises( + exception.RoleNotFound, f, role_id=uuid.uuid4().hex, **kwargs + ) user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user_resp = PROVIDERS.identity_api.create_user(user) group = 
unit.new_group_ref(domain_id=CONF.identity.default_domain_id) group_resp = PROVIDERS.identity_api.create_group(group) project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project_resp = PROVIDERS.resource_api.create_project( project['id'], project ) - for manager_call in [PROVIDERS.assignment_api.create_grant, - PROVIDERS.assignment_api.get_grant]: - assert_role_not_found_exception( - manager_call, - user_id=user_resp['id'], project_id=project_resp['id']) - assert_role_not_found_exception( - manager_call, - group_id=group_resp['id'], project_id=project_resp['id']) + for manager_call in [ + PROVIDERS.assignment_api.create_grant, + PROVIDERS.assignment_api.get_grant, + ]: assert_role_not_found_exception( manager_call, user_id=user_resp['id'], - domain_id=CONF.identity.default_domain_id) + project_id=project_resp['id'], + ) assert_role_not_found_exception( manager_call, group_id=group_resp['id'], - domain_id=CONF.identity.default_domain_id) + project_id=project_resp['id'], + ) + assert_role_not_found_exception( + manager_call, + user_id=user_resp['id'], + domain_id=CONF.identity.default_domain_id, + ) + assert_role_not_found_exception( + manager_call, + group_id=group_resp['id'], + domain_id=CONF.identity.default_domain_id, + ) - assert_role_not_found_exception( - PROVIDERS.assignment_api.delete_grant, - user_id=user_resp['id'], project_id=project_resp['id']) - assert_role_not_found_exception( - PROVIDERS.assignment_api.delete_grant, - group_id=group_resp['id'], project_id=project_resp['id']) assert_role_not_found_exception( PROVIDERS.assignment_api.delete_grant, user_id=user_resp['id'], - domain_id=CONF.identity.default_domain_id) + project_id=project_resp['id'], + ) assert_role_not_found_exception( PROVIDERS.assignment_api.delete_grant, group_id=group_resp['id'], - domain_id=CONF.identity.default_domain_id) + project_id=project_resp['id'], + ) + assert_role_not_found_exception( + 
PROVIDERS.assignment_api.delete_grant, + user_id=user_resp['id'], + domain_id=CONF.identity.default_domain_id, + ) + assert_role_not_found_exception( + PROVIDERS.assignment_api.delete_grant, + group_id=group_resp['id'], + domain_id=CONF.identity.default_domain_id, + ) def test_multi_role_grant_by_user_group_on_project_domain(self): role_list = [] @@ -1497,48 +1661,52 @@ class AssignmentTests(AssignmentTestHelperMixin): project1 = unit.new_project_ref(domain_id=domain1['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group1['id'] - ) - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group2['id'] - ) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id']) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group2['id']) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain1['id'], - role_id=role_list[0]['id'] + user_id=user1['id'], + domain_id=domain1['id'], + role_id=role_list[0]['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain1['id'], - role_id=role_list[1]['id'] + user_id=user1['id'], + domain_id=domain1['id'], + role_id=role_list[1]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], domain_id=domain1['id'], - role_id=role_list[2]['id'] + group_id=group1['id'], + domain_id=domain1['id'], + role_id=role_list[2]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], domain_id=domain1['id'], - role_id=role_list[3]['id'] + group_id=group1['id'], + domain_id=domain1['id'], + role_id=role_list[3]['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=project1['id'], - role_id=role_list[4]['id'] + user_id=user1['id'], + project_id=project1['id'], + 
role_id=role_list[4]['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=project1['id'], - role_id=role_list[5]['id'] + user_id=user1['id'], + project_id=project1['id'], + role_id=role_list[5]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=role_list[6]['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=role_list[6]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=role_list[7]['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=role_list[7]['id'], ) roles_ref = PROVIDERS.assignment_api.list_grants( user_id=user1['id'], domain_id=domain1['id'] @@ -1622,40 +1790,42 @@ class AssignmentTests(AssignmentTestHelperMixin): project1 = unit.new_project_ref(domain_id=domain1['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group1['id'] - ) - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group2['id'] - ) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id']) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group2['id']) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain1['id'], - role_id=role_list[0]['id'] + user_id=user1['id'], + domain_id=domain1['id'], + role_id=role_list[0]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], domain_id=domain1['id'], - role_id=role_list[1]['id'] + group_id=group1['id'], + domain_id=domain1['id'], + role_id=role_list[1]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group2['id'], domain_id=domain1['id'], - role_id=role_list[2]['id'] + group_id=group2['id'], + domain_id=domain1['id'], + 
role_id=role_list[2]['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=project1['id'], - role_id=role_list[3]['id'] + user_id=user1['id'], + project_id=project1['id'], + role_id=role_list[3]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=role_list[4]['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=role_list[4]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group2['id'], project_id=project1['id'], - role_id=role_list[5]['id'] + group_id=group2['id'], + project_id=project1['id'], + role_id=role_list[5]['id'], ) # Read by the roles, ensuring we get the correct 3 roles for @@ -1698,72 +1868,87 @@ class AssignmentTests(AssignmentTestHelperMixin): user_id=user1['id'], domain_id=domain1['id'], role_id=role1['id'] ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=role1['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=role1['id'], ) PROVIDERS.assignment_api.create_grant( group_id=group1['id'], domain_id=domain1['id'], role_id=role1['id'] ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(1, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) + group_id=group1['id'], project_id=project1['id'] + ) self.assertEqual(1, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) + user_id=user1['id'], domain_id=domain1['id'] + ) self.assertEqual(1, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) + group_id=group1['id'], domain_id=domain1['id'] + ) self.assertEqual(1, len(roles_ref)) PROVIDERS.role_api.delete_role(role1['id']) roles_ref = 
PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(0, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) + group_id=group1['id'], project_id=project1['id'] + ) self.assertEqual(0, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) + user_id=user1['id'], domain_id=domain1['id'] + ) self.assertEqual(0, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) + group_id=group1['id'], domain_id=domain1['id'] + ) self.assertEqual(0, len(roles_ref)) def test_list_role_assignment_by_domain(self): """Test listing of role assignment filtered by domain.""" test_plan = { # A domain with 3 users, 1 group, a spoiler domain and 2 roles. - 'entities': {'domains': [{'users': 3, 'groups': 1}, 1], - 'roles': 2}, + 'entities': { + 'domains': [{'users': 3, 'groups': 1}, 1], + 'roles': 2, + }, # Users 1 & 2 are in the group 'group_memberships': [{'group': 0, 'users': [1, 2]}], # Assign a role for user 0 and the group - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 0}], + 'assignments': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'group': 0, 'role': 1, 'domain': 0}, + ], 'tests': [ # List all effective assignments for domain[0]. # Should get one direct user role and user roles for each of # the users in the group. 
- {'params': {'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 1, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 2, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}} - ]}, + { + 'params': {'domain': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'domain': 0}, + { + 'user': 1, + 'role': 1, + 'domain': 0, + 'indirect': {'group': 0}, + }, + { + 'user': 2, + 'role': 1, + 'domain': 0, + 'indirect': {'group': 0}, + }, + ], + }, # Using domain[1] should return nothing - {'params': {'domain': 1, 'effective': True}, - 'results': []}, - ] + {'params': {'domain': 1, 'effective': True}, 'results': []}, + ], } self.execute_assignment_plan(test_plan) @@ -1772,43 +1957,75 @@ class AssignmentTests(AssignmentTestHelperMixin): test_plan = { # A domain with 3 users, 3 groups, a spoiler domain # plus 3 roles. - 'entities': {'domains': [{'users': 3, 'groups': 3}, 1], - 'roles': 3}, + 'entities': { + 'domains': [{'users': 3, 'groups': 3}, 1], + 'roles': 3, + }, # Users 1 & 2 are in the group 0, User 1 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 0}, - {'group': 1, 'role': 2, 'domain': 0}, - # ...and two spoiler assignments - {'user': 1, 'role': 1, 'domain': 0}, - {'group': 2, 'role': 2, 'domain': 0}], + 'group_memberships': [ + {'group': 0, 'users': [0, 1]}, + {'group': 1, 'users': [0]}, + ], + 'assignments': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'group': 0, 'role': 1, 'domain': 0}, + {'group': 1, 'role': 2, 'domain': 0}, + # ...and two spoiler assignments + {'user': 1, 'role': 1, 'domain': 0}, + {'group': 2, 'role': 2, 'domain': 0}, + ], 'tests': [ # List all effective assignments for user[0]. 
# Should get one direct user role and a user roles for each of # groups 0 and 1 - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 2, 'domain': 0, - 'indirect': {'group': 1}} - ]}, + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'domain': 0}, + { + 'user': 0, + 'role': 1, + 'domain': 0, + 'indirect': {'group': 0}, + }, + { + 'user': 0, + 'role': 2, + 'domain': 0, + 'indirect': {'group': 1}, + }, + ], + }, # Adding domain[0] as a filter should return the same data - {'params': {'user': 0, 'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 2, 'domain': 0, - 'indirect': {'group': 1}} - ]}, + { + 'params': {'user': 0, 'domain': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'domain': 0}, + { + 'user': 0, + 'role': 1, + 'domain': 0, + 'indirect': {'group': 0}, + }, + { + 'user': 0, + 'role': 2, + 'domain': 0, + 'indirect': {'group': 1}, + }, + ], + }, # Using domain[1] should return nothing - {'params': {'user': 0, 'domain': 1, 'effective': True}, - 'results': []}, + { + 'params': {'user': 0, 'domain': 1, 'effective': True}, + 'results': [], + }, # Using user[2] should return nothing - {'params': {'user': 2, 'domain': 0, 'effective': True}, - 'results': []}, - ] + { + 'params': {'user': 2, 'domain': 0, 'effective': True}, + 'results': [], + }, + ], } self.execute_assignment_plan(test_plan) @@ -1817,36 +2034,56 @@ class AssignmentTests(AssignmentTestHelperMixin): test_plan = { # The default domain with 3 users, 3 groups, 3 projects, # plus 3 roles. 
- 'entities': {'domains': {'id': CONF.identity.default_domain_id, - 'users': 3, 'groups': 3, 'projects': 3}, - 'roles': 3}, + 'entities': { + 'domains': { + 'id': CONF.identity.default_domain_id, + 'users': 3, + 'groups': 3, + 'projects': 3, + }, + 'roles': 3, + }, # Users 0 & 1 are in the group 0, User 0 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], + 'group_memberships': [ + {'group': 0, 'users': [0, 1]}, + {'group': 1, 'users': [0]}, + ], # Spread the assignments around - we want to be able to show that # if sourced by group, assignments from other sources are excluded - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'group': 0, 'role': 1, 'project': 1}, - {'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1}, - {'user': 2, 'role': 1, 'project': 1}, - {'group': 2, 'role': 2, 'project': 2} - ], + 'assignments': [ + {'user': 0, 'role': 0, 'project': 0}, + {'group': 0, 'role': 1, 'project': 1}, + {'group': 1, 'role': 2, 'project': 0}, + {'group': 1, 'role': 2, 'project': 1}, + {'user': 2, 'role': 1, 'project': 1}, + {'group': 2, 'role': 2, 'project': 2}, + ], 'tests': [ # List all effective assignments sourced from groups 0 and 1 - {'params': {'source_from_group_ids': [0, 1], - 'effective': True}, - 'results': [{'group': 0, 'role': 1, 'project': 1}, - {'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1} - ]}, + { + 'params': { + 'source_from_group_ids': [0, 1], + 'effective': True, + }, + 'results': [ + {'group': 0, 'role': 1, 'project': 1}, + {'group': 1, 'role': 2, 'project': 0}, + {'group': 1, 'role': 2, 'project': 1}, + ], + }, # Adding a role a filter should further restrict the entries - {'params': {'source_from_group_ids': [0, 1], 'role': 2, - 'effective': True}, - 'results': [{'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1} - ]}, - ] + { + 'params': { + 'source_from_group_ids': [0, 1], + 'role': 2, + 'effective': 
True, + }, + 'results': [ + {'group': 1, 'role': 2, 'project': 0}, + {'group': 1, 'role': 2, 'project': 1}, + ], + }, + ], } self.execute_assignment_plan(test_plan) @@ -1855,35 +2092,50 @@ class AssignmentTests(AssignmentTestHelperMixin): test_plan = { # A domain with 3 users, 3 groups, 3 projects, a second domain, # plus 3 roles. - 'entities': {'domains': [{'users': 3, 'groups': 3, 'projects': 3}, - 1], - 'roles': 3}, + 'entities': { + 'domains': [{'users': 3, 'groups': 3, 'projects': 3}, 1], + 'roles': 3, + }, # Users 0 & 1 are in the group 0, User 0 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], + 'group_memberships': [ + {'group': 0, 'users': [0, 1]}, + {'group': 1, 'users': [0]}, + ], # Spread the assignments around - we want to be able to show that # if sourced by group, assignments from other sources are excluded - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 1}, - {'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1}, - {'user': 2, 'role': 1, 'project': 1}, - {'group': 2, 'role': 2, 'project': 2} - ], + 'assignments': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'group': 0, 'role': 1, 'domain': 1}, + {'group': 1, 'role': 2, 'project': 0}, + {'group': 1, 'role': 2, 'project': 1}, + {'user': 2, 'role': 1, 'project': 1}, + {'group': 2, 'role': 2, 'project': 2}, + ], 'tests': [ # List all effective assignments sourced from groups 0 and 1 - {'params': {'source_from_group_ids': [0, 1], - 'effective': True}, - 'results': [{'group': 0, 'role': 1, 'domain': 1}, - {'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1} - ]}, + { + 'params': { + 'source_from_group_ids': [0, 1], + 'effective': True, + }, + 'results': [ + {'group': 0, 'role': 1, 'domain': 1}, + {'group': 1, 'role': 2, 'project': 0}, + {'group': 1, 'role': 2, 'project': 1}, + ], + }, # Adding a role a filter should further restrict the entries - {'params': 
{'source_from_group_ids': [0, 1], 'role': 1, - 'effective': True}, - 'results': [{'group': 0, 'role': 1, 'domain': 1}, - ]}, - ] + { + 'params': { + 'source_from_group_ids': [0, 1], + 'role': 1, + 'effective': True, + }, + 'results': [ + {'group': 0, 'role': 1, 'domain': 1}, + ], + }, + ], } self.execute_assignment_plan(test_plan) @@ -1891,16 +2143,20 @@ class AssignmentTests(AssignmentTestHelperMixin): """Show we trap this unsupported internal combination of params.""" group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) group = PROVIDERS.identity_api.create_group(group) - self.assertRaises(exception.UnexpectedError, - PROVIDERS.assignment_api.list_role_assignments, - effective=True, - user_id=self.user_foo['id'], - source_from_group_ids=[group['id']]) + self.assertRaises( + exception.UnexpectedError, + PROVIDERS.assignment_api.list_role_assignments, + effective=True, + user_id=self.user_foo['id'], + source_from_group_ids=[group['id']], + ) def test_list_user_project_ids_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - PROVIDERS.assignment_api.list_projects_for_user, - uuid.uuid4().hex) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.assignment_api.list_projects_for_user, + uuid.uuid4().hex, + ) def test_delete_user_with_project_association(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -1911,39 +2167,46 @@ class AssignmentTests(AssignmentTestHelperMixin): user['id'], self.project_bar['id'], role_member['id'] ) PROVIDERS.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - PROVIDERS.assignment_api.list_projects_for_user, - user['id']) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.assignment_api.list_projects_for_user, + user['id'], + ) def test_delete_user_with_project_roles(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user = PROVIDERS.identity_api.create_user(user) 
PROVIDERS.assignment_api.add_role_to_user_and_project( - user['id'], - self.project_bar['id'], - self.role_member['id']) + user['id'], self.project_bar['id'], self.role_member['id'] + ) PROVIDERS.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - PROVIDERS.assignment_api.list_projects_for_user, - user['id']) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.assignment_api.list_projects_for_user, + user['id'], + ) def test_delete_role_returns_not_found(self): - self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.delete_role, - uuid.uuid4().hex) + self.assertRaises( + exception.RoleNotFound, + PROVIDERS.role_api.delete_role, + uuid.uuid4().hex, + ) def test_delete_project_with_role_assignments(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - project['id'], - default_fixtures.MEMBER_ROLE_ID) + self.user_foo['id'], project['id'], default_fixtures.MEMBER_ROLE_ID + ) PROVIDERS.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.assignment_api.list_user_ids_for_project, - project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.assignment_api.list_user_ids_for_project, + project['id'], + ) def test_delete_role_check_role_grant(self): role = unit.new_role_ref() @@ -1951,12 +2214,15 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.role_api.create_role(role['id'], role) PROVIDERS.role_api.create_role(alt_role['id'], alt_role) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.project_bar['id'], role['id']) + self.user_foo['id'], self.project_bar['id'], role['id'] + ) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.project_bar['id'], alt_role['id']) + 
self.user_foo['id'], self.project_bar['id'], alt_role['id'] + ) PROVIDERS.role_api.delete_role(role['id']) roles_ref = PROVIDERS.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.project_bar['id']) + self.user_foo['id'], self.project_bar['id'] + ) self.assertNotIn(role['id'], roles_ref) self.assertIn(alt_role['id'], roles_ref) @@ -1970,12 +2236,14 @@ class AssignmentTests(AssignmentTestHelperMixin): ) self.assertEqual(0, len(user_projects)) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=self.project_baz['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_baz['id'], + role_id=self.role_member['id'], ) user_projects = PROVIDERS.assignment_api.list_projects_for_user( user1['id'] @@ -2003,16 +2271,19 @@ class AssignmentTests(AssignmentTestHelperMixin): # Create 3 grants, one user grant, the other two as group grants PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=self.role_admin['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=self.role_admin['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group2['id'], project_id=project2['id'], - role_id=self.role_admin['id'] + group_id=group2['id'], + project_id=project2['id'], + role_id=self.role_admin['id'], ) user_projects = PROVIDERS.assignment_api.list_projects_for_user( user1['id'] @@ -2024,14 +2295,16 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.create_grant( 
self.role_other['id'], user_id=uuid.uuid4().hex, - project_id=self.project_bar['id']) + project_id=self.project_bar['id'], + ) def test_create_grant_no_group(self): # If call create_grant with a group that doesn't exist, doesn't fail. PROVIDERS.assignment_api.create_grant( self.role_other['id'], group_id=uuid.uuid4().hex, - project_id=self.project_bar['id']) + project_id=self.project_bar['id'], + ) def test_delete_group_removes_role_assignments(self): # When a group is deleted any role assignments for the group are @@ -2039,25 +2312,32 @@ class AssignmentTests(AssignmentTestHelperMixin): def get_member_assignments(): assignments = PROVIDERS.assignment_api.list_role_assignments() - return ([x for x in assignments if x['role_id'] == - default_fixtures.MEMBER_ROLE_ID]) + return [ + x + for x in assignments + if x['role_id'] == default_fixtures.MEMBER_ROLE_ID + ] orig_member_assignments = get_member_assignments() # Create a group. new_group = unit.new_group_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) new_group = PROVIDERS.identity_api.create_group(new_group) # Create a project. new_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(new_project['id'], new_project) # Assign a role to the group. PROVIDERS.assignment_api.create_grant( - group_id=new_group['id'], project_id=new_project['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + group_id=new_group['id'], + project_id=new_project['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) # Delete the group. 
PROVIDERS.identity_api.delete_group(new_group['id']) @@ -2065,8 +2345,9 @@ class AssignmentTests(AssignmentTestHelperMixin): # Check that the role assignment for the group is gone member_assignments = get_member_assignments() - self.assertThat(member_assignments, - matchers.Equals(orig_member_assignments)) + self.assertThat( + member_assignments, matchers.Equals(orig_member_assignments) + ) def test_get_roles_for_groups_on_domain(self): """Test retrieving group domain roles. @@ -2096,23 +2377,28 @@ class AssignmentTests(AssignmentTestHelperMixin): # Assign the roles - one is inherited PROVIDERS.assignment_api.create_grant( - group_id=group_list[0]['id'], domain_id=domain1['id'], - role_id=role_list[0]['id'] + group_id=group_list[0]['id'], + domain_id=domain1['id'], + role_id=role_list[0]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[1]['id'], domain_id=domain1['id'], - role_id=role_list[1]['id'] + group_id=group_list[1]['id'], + domain_id=domain1['id'], + role_id=role_list[1]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[2]['id'], domain_id=domain1['id'], - role_id=role_list[2]['id'], inherited_to_projects=True + group_id=group_list[2]['id'], + domain_id=domain1['id'], + role_id=role_list[2]['id'], + inherited_to_projects=True, ) # Now get the effective roles for the groups on the domain project. We # shouldn't get back the inherited role. 
role_refs = PROVIDERS.assignment_api.get_roles_for_groups( - group_id_list, domain_id=domain1['id']) + group_id_list, domain_id=domain1['id'] + ) self.assertThat(role_refs, matchers.HasLength(2)) self.assertIn(role_list[0], role_refs) @@ -2157,37 +2443,46 @@ class AssignmentTests(AssignmentTestHelperMixin): # Assign the roles - one inherited and one non-inherited on Domain1, # plus one on Project1 PROVIDERS.assignment_api.create_grant( - group_id=group_list[0]['id'], domain_id=domain1['id'], - role_id=role_list[0]['id'] + group_id=group_list[0]['id'], + domain_id=domain1['id'], + role_id=role_list[0]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[1]['id'], domain_id=domain1['id'], - role_id=role_list[1]['id'], inherited_to_projects=True + group_id=group_list[1]['id'], + domain_id=domain1['id'], + role_id=role_list[1]['id'], + inherited_to_projects=True, ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[2]['id'], project_id=project1['id'], - role_id=role_list[2]['id'] + group_id=group_list[2]['id'], + project_id=project1['id'], + role_id=role_list[2]['id'], ) # ...and a duplicate set of spoiler assignments to Domain2/Project2 PROVIDERS.assignment_api.create_grant( - group_id=group_list[3]['id'], domain_id=domain2['id'], - role_id=role_list[3]['id'] + group_id=group_list[3]['id'], + domain_id=domain2['id'], + role_id=role_list[3]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[4]['id'], domain_id=domain2['id'], - role_id=role_list[4]['id'], inherited_to_projects=True + group_id=group_list[4]['id'], + domain_id=domain2['id'], + role_id=role_list[4]['id'], + inherited_to_projects=True, ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[5]['id'], project_id=project2['id'], - role_id=role_list[5]['id'] + group_id=group_list[5]['id'], + project_id=project2['id'], + role_id=role_list[5]['id'], ) # With inheritance on, we should also get back the inherited role from # its owning domain. 
role_refs = PROVIDERS.assignment_api.get_roles_for_groups( - group_id_list, project_id=project1['id']) + group_id_list, project_id=project1['id'] + ) self.assertThat(role_refs, matchers.HasLength(2)) self.assertIn(role_list[1], role_refs) @@ -2222,23 +2517,28 @@ class AssignmentTests(AssignmentTestHelperMixin): # Assign the roles - one is inherited PROVIDERS.assignment_api.create_grant( - group_id=group_list[0]['id'], domain_id=domain_list[0]['id'], - role_id=role1['id'] + group_id=group_list[0]['id'], + domain_id=domain_list[0]['id'], + role_id=role1['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[1]['id'], domain_id=domain_list[1]['id'], - role_id=role1['id'] + group_id=group_list[1]['id'], + domain_id=domain_list[1]['id'], + role_id=role1['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[2]['id'], domain_id=domain_list[2]['id'], - role_id=role1['id'], inherited_to_projects=True + group_id=group_list[2]['id'], + domain_id=domain_list[2]['id'], + role_id=role1['id'], + inherited_to_projects=True, ) # Now list the domains that have roles for any of the 3 groups # We shouldn't get back domain[2] since that had an inherited role. 
- domain_refs = ( - PROVIDERS.assignment_api.list_domains_for_groups(group_id_list)) + domain_refs = PROVIDERS.assignment_api.list_domains_for_groups( + group_id_list + ) self.assertThat(domain_refs, matchers.HasLength(2)) self.assertIn(domain_list[0], domain_refs) @@ -2295,43 +2595,56 @@ class AssignmentTests(AssignmentTestHelperMixin): # Assign the roles - one inherited and one non-inherited on Domain1, # plus one on Project1 and Project2 PROVIDERS.assignment_api.create_grant( - group_id=group_list[0]['id'], domain_id=domain1['id'], - role_id=role_list[0]['id'] + group_id=group_list[0]['id'], + domain_id=domain1['id'], + role_id=role_list[0]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[1]['id'], domain_id=domain1['id'], - role_id=role_list[1]['id'], inherited_to_projects=True + group_id=group_list[1]['id'], + domain_id=domain1['id'], + role_id=role_list[1]['id'], + inherited_to_projects=True, ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[2]['id'], project_id=project1['id'], - role_id=role_list[2]['id'] + group_id=group_list[2]['id'], + project_id=project1['id'], + role_id=role_list[2]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[3]['id'], project_id=project2['id'], - role_id=role_list[3]['id'] + group_id=group_list[3]['id'], + project_id=project2['id'], + role_id=role_list[3]['id'], ) # ...and a few of spoiler assignments to Domain2/Project4 PROVIDERS.assignment_api.create_grant( - group_id=group_list[4]['id'], domain_id=domain2['id'], - role_id=role_list[4]['id'] + group_id=group_list[4]['id'], + domain_id=domain2['id'], + role_id=role_list[4]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[5]['id'], domain_id=domain2['id'], - role_id=role_list[5]['id'], inherited_to_projects=True + group_id=group_list[5]['id'], + domain_id=domain2['id'], + role_id=role_list[5]['id'], + inherited_to_projects=True, ) PROVIDERS.assignment_api.create_grant( - group_id=group_list[6]['id'], 
project_id=project4['id'], - role_id=role_list[6]['id'] + group_id=group_list[6]['id'], + project_id=project4['id'], + role_id=role_list[6]['id'], ) - group_id_list = [group_list[1]['id'], group_list[2]['id'], - group_list[3]['id']] + group_id_list = [ + group_list[1]['id'], + group_list[2]['id'], + group_list[3]['id'], + ] # With inheritance on, we should also get back the Project3 due to the # inherited role from its owning domain. - project_refs = ( - PROVIDERS.assignment_api.list_projects_for_groups(group_id_list)) + project_refs = PROVIDERS.assignment_api.list_projects_for_groups( + group_id_list + ) self.assertThat(project_refs, matchers.HasLength(3)) self.assertIn(project1, project_refs) @@ -2371,30 +2684,36 @@ class AssignmentTests(AssignmentTestHelperMixin): new_group = PROVIDERS.identity_api.create_group(new_group) PROVIDERS.resource_api.create_project(new_project['id'], new_project) PROVIDERS.assignment_api.create_grant( - user_id=new_user['id'], project_id=new_project['id'], - role_id=new_role['id'] + user_id=new_user['id'], + project_id=new_project['id'], + role_id=new_role['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=new_group['id'], project_id=new_project['id'], - role_id=new_role['id'] + group_id=new_group['id'], + project_id=new_project['id'], + role_id=new_role['id'], ) PROVIDERS.assignment_api.create_grant( - domain_id=new_domain['id'], user_id=new_user['id'], - role_id=new_role['id'] + domain_id=new_domain['id'], + user_id=new_user['id'], + role_id=new_role['id'], ) # Get the created assignments with the include_names flag _asgmt_prj = PROVIDERS.assignment_api.list_role_assignments( user_id=new_user['id'], project_id=new_project['id'], - include_names=True) + include_names=True, + ) _asgmt_grp = PROVIDERS.assignment_api.list_role_assignments( group_id=new_group['id'], project_id=new_project['id'], - include_names=True) + include_names=True, + ) _asgmt_dmn = PROVIDERS.assignment_api.list_role_assignments( domain_id=new_domain['id'], 
user_id=new_user['id'], - include_names=True) + include_names=True, + ) # Make sure we can get back the correct number of assignments self.assertThat(_asgmt_prj, matchers.HasLength(1)) self.assertThat(_asgmt_grp, matchers.HasLength(1)) @@ -2404,45 +2723,44 @@ class AssignmentTests(AssignmentTestHelperMixin): first_asgmt_grp = _asgmt_grp[0] first_asgmt_dmn = _asgmt_dmn[0] # Assert the names are correct in the project response - self.assertEqual(new_project['name'], - first_asgmt_prj['project_name']) - self.assertEqual(new_project['domain_id'], - first_asgmt_prj['project_domain_id']) - self.assertEqual(new_user['name'], - first_asgmt_prj['user_name']) - self.assertEqual(new_user['domain_id'], - first_asgmt_prj['user_domain_id']) - self.assertEqual(new_role['name'], - first_asgmt_prj['role_name']) + self.assertEqual(new_project['name'], first_asgmt_prj['project_name']) + self.assertEqual( + new_project['domain_id'], first_asgmt_prj['project_domain_id'] + ) + self.assertEqual(new_user['name'], first_asgmt_prj['user_name']) + self.assertEqual( + new_user['domain_id'], first_asgmt_prj['user_domain_id'] + ) + self.assertEqual(new_role['name'], first_asgmt_prj['role_name']) if domain_role: - self.assertEqual(new_role['domain_id'], - first_asgmt_prj['role_domain_id']) + self.assertEqual( + new_role['domain_id'], first_asgmt_prj['role_domain_id'] + ) # Assert the names are correct in the group response - self.assertEqual(new_group['name'], - first_asgmt_grp['group_name']) - self.assertEqual(new_group['domain_id'], - first_asgmt_grp['group_domain_id']) - self.assertEqual(new_project['name'], - first_asgmt_grp['project_name']) - self.assertEqual(new_project['domain_id'], - first_asgmt_grp['project_domain_id']) - self.assertEqual(new_role['name'], - first_asgmt_grp['role_name']) + self.assertEqual(new_group['name'], first_asgmt_grp['group_name']) + self.assertEqual( + new_group['domain_id'], first_asgmt_grp['group_domain_id'] + ) + self.assertEqual(new_project['name'], 
first_asgmt_grp['project_name']) + self.assertEqual( + new_project['domain_id'], first_asgmt_grp['project_domain_id'] + ) + self.assertEqual(new_role['name'], first_asgmt_grp['role_name']) if domain_role: - self.assertEqual(new_role['domain_id'], - first_asgmt_grp['role_domain_id']) + self.assertEqual( + new_role['domain_id'], first_asgmt_grp['role_domain_id'] + ) # Assert the names are correct in the domain response - self.assertEqual(new_domain['name'], - first_asgmt_dmn['domain_name']) - self.assertEqual(new_user['name'], - first_asgmt_dmn['user_name']) - self.assertEqual(new_user['domain_id'], - first_asgmt_dmn['user_domain_id']) - self.assertEqual(new_role['name'], - first_asgmt_dmn['role_name']) + self.assertEqual(new_domain['name'], first_asgmt_dmn['domain_name']) + self.assertEqual(new_user['name'], first_asgmt_dmn['user_name']) + self.assertEqual( + new_user['domain_id'], first_asgmt_dmn['user_domain_id'] + ) + self.assertEqual(new_role['name'], first_asgmt_dmn['role_name']) if domain_role: - self.assertEqual(new_role['domain_id'], - first_asgmt_dmn['role_domain_id']) + self.assertEqual( + new_role['domain_id'], first_asgmt_dmn['role_domain_id'] + ) def test_list_role_assignment_containing_names_global_role(self): self._test_list_role_assignment_containing_names() @@ -2458,6 +2776,7 @@ class AssignmentTests(AssignmentTestHelperMixin): - names are NOT included when include_names=False """ + def assert_does_not_contain_names(assignment): first_asgmt_prj = assignment[0] self.assertNotIn('project_name', first_asgmt_prj) @@ -2477,8 +2796,9 @@ class AssignmentTests(AssignmentTestHelperMixin): new_user = PROVIDERS.identity_api.create_user(new_user) PROVIDERS.resource_api.create_project(new_project['id'], new_project) PROVIDERS.assignment_api.create_grant( - user_id=new_user['id'], project_id=new_project['id'], - role_id=new_role['id'] + user_id=new_user['id'], + project_id=new_project['id'], + role_id=new_role['id'], ) # Get the created assignments with NO 
include_names flag role_assign_without_names = ( @@ -2492,7 +2812,7 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.assignment_api.list_role_assignments( user_id=new_user['id'], project_id=new_project['id'], - include_names=False + include_names=False, ) ) assert_does_not_contain_names(role_assign_without_names) @@ -2513,16 +2833,19 @@ class AssignmentTests(AssignmentTestHelperMixin): common_id = uuid.uuid4().hex # Create a project project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project = PROVIDERS.resource_api.create_project(project['id'], project) # Create a user - user = unit.new_user_ref(id=common_id, - domain_id=CONF.identity.default_domain_id) + user = unit.new_user_ref( + id=common_id, domain_id=CONF.identity.default_domain_id + ) user = PROVIDERS.identity_api.driver.create_user(common_id, user) self.assertEqual(common_id, user['id']) # Create a group - group = unit.new_group_ref(id=common_id, - domain_id=CONF.identity.default_domain_id) + group = unit.new_group_ref( + id=common_id, domain_id=CONF.identity.default_domain_id + ) group = PROVIDERS.identity_api.driver.create_group(common_id, group) self.assertEqual(common_id, group['id']) # Create four roles @@ -2532,35 +2855,45 @@ class AssignmentTests(AssignmentTestHelperMixin): roles.append(PROVIDERS.role_api.create_role(role['id'], role)) # Assign roles for user PROVIDERS.assignment_api.driver.create_grant( - user_id=user['id'], domain_id=CONF.identity.default_domain_id, - role_id=roles[0]['id']) + user_id=user['id'], + domain_id=CONF.identity.default_domain_id, + role_id=roles[0]['id'], + ) PROVIDERS.assignment_api.driver.create_grant( - user_id=user['id'], project_id=project['id'], - role_id=roles[1]['id'] + user_id=user['id'], + project_id=project['id'], + role_id=roles[1]['id'], ) # Assign roles for group PROVIDERS.assignment_api.driver.create_grant( - group_id=group['id'], 
domain_id=CONF.identity.default_domain_id, - role_id=roles[2]['id']) + group_id=group['id'], + domain_id=CONF.identity.default_domain_id, + role_id=roles[2]['id'], + ) PROVIDERS.assignment_api.driver.create_grant( - group_id=group['id'], project_id=project['id'], - role_id=roles[3]['id'] + group_id=group['id'], + project_id=project['id'], + role_id=roles[3]['id'], ) # Make sure they were assigned user_assignments = PROVIDERS.assignment_api.list_role_assignments( - user_id=user['id']) + user_id=user['id'] + ) self.assertThat(user_assignments, matchers.HasLength(2)) group_assignments = PROVIDERS.assignment_api.list_role_assignments( - group_id=group['id']) + group_id=group['id'] + ) self.assertThat(group_assignments, matchers.HasLength(2)) # Delete user assignments PROVIDERS.assignment_api.delete_user_assignments(user_id=user['id']) # Assert only user assignments were deleted user_assignments = PROVIDERS.assignment_api.list_role_assignments( - user_id=user['id']) + user_id=user['id'] + ) self.assertThat(user_assignments, matchers.HasLength(0)) group_assignments = PROVIDERS.assignment_api.list_role_assignments( - group_id=group['id']) + group_id=group['id'] + ) self.assertThat(group_assignments, matchers.HasLength(2)) # Make sure these remaining assignments are group-related for assignment in group_assignments: @@ -2582,16 +2915,19 @@ class AssignmentTests(AssignmentTestHelperMixin): common_id = uuid.uuid4().hex # Create a project project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project = PROVIDERS.resource_api.create_project(project['id'], project) # Create a user - user = unit.new_user_ref(id=common_id, - domain_id=CONF.identity.default_domain_id) + user = unit.new_user_ref( + id=common_id, domain_id=CONF.identity.default_domain_id + ) user = PROVIDERS.identity_api.driver.create_user(common_id, user) self.assertEqual(common_id, user['id']) # Create a group - group = 
unit.new_group_ref(id=common_id, - domain_id=CONF.identity.default_domain_id) + group = unit.new_group_ref( + id=common_id, domain_id=CONF.identity.default_domain_id + ) group = PROVIDERS.identity_api.driver.create_group(common_id, group) self.assertEqual(common_id, group['id']) # Create four roles @@ -2601,35 +2937,45 @@ class AssignmentTests(AssignmentTestHelperMixin): roles.append(PROVIDERS.role_api.create_role(role['id'], role)) # Assign roles for user PROVIDERS.assignment_api.driver.create_grant( - user_id=user['id'], domain_id=CONF.identity.default_domain_id, - role_id=roles[0]['id']) + user_id=user['id'], + domain_id=CONF.identity.default_domain_id, + role_id=roles[0]['id'], + ) PROVIDERS.assignment_api.driver.create_grant( - user_id=user['id'], project_id=project['id'], - role_id=roles[1]['id'] + user_id=user['id'], + project_id=project['id'], + role_id=roles[1]['id'], ) # Assign roles for group PROVIDERS.assignment_api.driver.create_grant( - group_id=group['id'], domain_id=CONF.identity.default_domain_id, - role_id=roles[2]['id']) + group_id=group['id'], + domain_id=CONF.identity.default_domain_id, + role_id=roles[2]['id'], + ) PROVIDERS.assignment_api.driver.create_grant( - group_id=group['id'], project_id=project['id'], - role_id=roles[3]['id'] + group_id=group['id'], + project_id=project['id'], + role_id=roles[3]['id'], ) # Make sure they were assigned user_assignments = PROVIDERS.assignment_api.list_role_assignments( - user_id=user['id']) + user_id=user['id'] + ) self.assertThat(user_assignments, matchers.HasLength(2)) group_assignments = PROVIDERS.assignment_api.list_role_assignments( - group_id=group['id']) + group_id=group['id'] + ) self.assertThat(group_assignments, matchers.HasLength(2)) # Delete group assignments PROVIDERS.assignment_api.delete_group_assignments(group_id=group['id']) # Assert only group assignments were deleted group_assignments = PROVIDERS.assignment_api.list_role_assignments( - group_id=group['id']) + group_id=group['id'] + ) 
self.assertThat(group_assignments, matchers.HasLength(0)) user_assignments = PROVIDERS.assignment_api.list_role_assignments( - user_id=user['id']) + user_id=user['id'] + ) self.assertThat(user_assignments, matchers.HasLength(2)) # Make sure these remaining assignments are user-related for assignment in group_assignments: @@ -2651,17 +2997,20 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain) PROVIDERS.assignment_api.create_grant( - group_id=group['id'], domain_id=new_domain['id'], - role_id=role['id'] + group_id=group['id'], + domain_id=new_domain['id'], + role_id=role['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=self.user_two['id'], domain_id=new_domain['id'], - role_id=role['id'] + user_id=self.user_two['id'], + domain_id=new_domain['id'], + role_id=role['id'], ) # Check there are 4 role assignments for that role role_assignments = PROVIDERS.assignment_api.list_role_assignments( - role_id=role['id']) + role_id=role['id'] + ) self.assertThat(role_assignments, matchers.HasLength(4)) # Delete first new domain and check only 2 assignments were left @@ -2671,7 +3020,8 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.resource_api.delete_domain(new_domains[0]['id']) role_assignments = PROVIDERS.assignment_api.list_role_assignments( - role_id=role['id']) + role_id=role['id'] + ) self.assertThat(role_assignments, matchers.HasLength(2)) # Delete second new domain and check no assignments were left @@ -2681,7 +3031,8 @@ class AssignmentTests(AssignmentTestHelperMixin): PROVIDERS.resource_api.delete_domain(new_domains[1]['id']) role_assignments = PROVIDERS.assignment_api.list_role_assignments( - role_id=role['id']) + role_id=role['id'] + ) self.assertEqual([], role_assignments) @@ -2689,33 +3040,62 @@ class InheritanceTests(AssignmentTestHelperMixin): def test_role_assignments_user_domain_to_project_inheritance(self): test_plan = { - 'entities': {'domains': {'users': 2, 
'projects': 1}, - 'roles': 3}, - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'user': 1, 'role': 1, 'project': 0}], + 'entities': {'domains': {'users': 2, 'projects': 1}, 'roles': 3}, + 'assignments': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'user': 0, 'role': 1, 'project': 0}, + { + 'user': 0, + 'role': 2, + 'domain': 0, + 'inherited_to_projects': True, + }, + {'user': 1, 'role': 1, 'project': 0}, + ], 'tests': [ # List all direct assignments for user[0] - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': 'projects'}]}, + { + 'params': {'user': 0}, + 'results': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'user': 0, 'role': 1, 'project': 0}, + { + 'user': 0, + 'role': 2, + 'domain': 0, + 'inherited_to_projects': 'projects', + }, + ], + }, # Now the effective ones - so the domain role should turn into # a project role - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]}, + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'user': 0, 'role': 1, 'project': 0}, + { + 'user': 0, + 'role': 2, + 'project': 0, + 'indirect': {'domain': 0}, + }, + ], + }, # Narrow down to effective roles for user[0] and project[0] - {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]} - ] + { + 'params': {'user': 0, 'project': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 1, 'project': 0}, + { + 'user': 0, + 'role': 2, + 'project': 0, + 'indirect': {'domain': 0}, + }, + ], + }, + ], } 
self.execute_assignment_plan(test_plan) @@ -2780,25 +3160,29 @@ class InheritanceTests(AssignmentTestHelperMixin): def test_crud_inherited_and_direct_assignment_for_user_on_domain(self): self._test_crud_inherited_and_direct_assignment( user_id=self.user_foo['id'], - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id, + ) def test_crud_inherited_and_direct_assignment_for_group_on_domain(self): group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) group = PROVIDERS.identity_api.create_group(group) self._test_crud_inherited_and_direct_assignment( - group_id=group['id'], domain_id=CONF.identity.default_domain_id) + group_id=group['id'], domain_id=CONF.identity.default_domain_id + ) def test_crud_inherited_and_direct_assignment_for_user_on_project(self): self._test_crud_inherited_and_direct_assignment( - user_id=self.user_foo['id'], project_id=self.project_baz['id']) + user_id=self.user_foo['id'], project_id=self.project_baz['id'] + ) def test_crud_inherited_and_direct_assignment_for_group_on_project(self): group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) group = PROVIDERS.identity_api.create_group(group) self._test_crud_inherited_and_direct_assignment( - group_id=group['id'], project_id=self.project_baz['id']) + group_id=group['id'], project_id=self.project_baz['id'] + ) def test_inherited_role_grants_for_user(self): """Test inherited user roles. 
@@ -2832,18 +3216,20 @@ class InheritanceTests(AssignmentTestHelperMixin): PROVIDERS.resource_api.create_project(project1['id'], project1) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(0, len(roles_ref)) # Create the first two roles - the domain one is not inherited PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=project1['id'], - role_id=role_list[0]['id'] + user_id=user1['id'], + project_id=project1['id'], + role_id=role_list[0]['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain1['id'], - role_id=role_list[1]['id'] + user_id=user1['id'], + domain_id=domain1['id'], + role_id=role_list[1]['id'], ) # Now get the effective roles for the user and project, this @@ -2858,8 +3244,10 @@ class InheritanceTests(AssignmentTestHelperMixin): # Now add an inherited role on the domain PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain1['id'], - role_id=role_list[2]['id'], inherited_to_projects=True + user_id=user1['id'], + domain_id=domain1['id'], + role_id=role_list[2]['id'], + inherited_to_projects=True, ) # Now get the effective roles for the user and project again, this @@ -2894,28 +3282,45 @@ class InheritanceTests(AssignmentTestHelperMixin): # can be refactored to simply ensure it gives the same answers. test_plan = { # A domain with a user & project, plus 3 roles. 
- 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 3}, - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'domain': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}], + 'entities': {'domains': {'users': 1, 'projects': 1}, 'roles': 3}, + 'assignments': [ + {'user': 0, 'role': 0, 'project': 0}, + {'user': 0, 'role': 1, 'domain': 0}, + { + 'user': 0, + 'role': 2, + 'domain': 0, + 'inherited_to_projects': True, + }, + ], 'tests': [ # List all effective assignments for user[0] on project[0]. # Should get one direct role and one inherited role. - {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]}, + { + 'params': {'user': 0, 'project': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 2, + 'project': 0, + 'indirect': {'domain': 0}, + }, + ], + }, # Ensure effective mode on the domain does not list the # inherited role on that domain - {'params': {'user': 0, 'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 1, 'domain': 0}]}, + { + 'params': {'user': 0, 'domain': 0, 'effective': True}, + 'results': [{'user': 0, 'role': 1, 'domain': 0}], + }, # Ensure non-inherited mode also only returns the non-inherited # role on the domain - {'params': {'user': 0, 'domain': 0, 'inherited': False}, - 'results': [{'user': 0, 'role': 1, 'domain': 0}]}, - ] + { + 'params': {'user': 0, 'domain': 0, 'inherited': False}, + 'results': [{'user': 0, 'role': 1, 'domain': 0}], + }, + ], } self.execute_assignment_plan(test_plan) @@ -2953,26 +3358,24 @@ class InheritanceTests(AssignmentTestHelperMixin): project1 = unit.new_project_ref(domain_id=domain1['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group1['id'] - ) - 
PROVIDERS.identity_api.add_user_to_group( - user1['id'], group2['id'] - ) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id']) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group2['id']) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(0, len(roles_ref)) # Create two roles - the domain one is not inherited PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=project1['id'], - role_id=role_list[0]['id'] + user_id=user1['id'], + project_id=project1['id'], + role_id=role_list[0]['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], domain_id=domain1['id'], - role_id=role_list[1]['id'] + group_id=group1['id'], + domain_id=domain1['id'], + role_id=role_list[1]['id'], ) # Now get the effective roles for the user and project, this @@ -2987,12 +3390,16 @@ class InheritanceTests(AssignmentTestHelperMixin): # Now add to more group roles, both inherited, to the domain PROVIDERS.assignment_api.create_grant( - group_id=group2['id'], domain_id=domain1['id'], - role_id=role_list[2]['id'], inherited_to_projects=True + group_id=group2['id'], + domain_id=domain1['id'], + role_id=role_list[2]['id'], + inherited_to_projects=True, ) PROVIDERS.assignment_api.create_grant( - group_id=group2['id'], domain_id=domain1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True + group_id=group2['id'], + domain_id=domain1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, ) # Now get the effective roles for the user and project again, this @@ -3018,28 +3425,54 @@ class InheritanceTests(AssignmentTestHelperMixin): # the same answers. test_plan = { # A domain with a user and project, 2 groups, plus 4 roles. 
- 'entities': {'domains': {'users': 1, 'projects': 1, 'groups': 2}, - 'roles': 4}, - 'group_memberships': [{'group': 0, 'users': [0]}, - {'group': 1, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'group': 0, 'role': 1, 'domain': 0}, - {'group': 1, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'group': 1, 'role': 3, 'domain': 0, - 'inherited_to_projects': True}], + 'entities': { + 'domains': {'users': 1, 'projects': 1, 'groups': 2}, + 'roles': 4, + }, + 'group_memberships': [ + {'group': 0, 'users': [0]}, + {'group': 1, 'users': [0]}, + ], + 'assignments': [ + {'user': 0, 'role': 0, 'project': 0}, + {'group': 0, 'role': 1, 'domain': 0}, + { + 'group': 1, + 'role': 2, + 'domain': 0, + 'inherited_to_projects': True, + }, + { + 'group': 1, + 'role': 3, + 'domain': 0, + 'inherited_to_projects': True, + }, + ], 'tests': [ # List all effective assignments for user[0] on project[0]. # Should get one direct role and both inherited roles, but # not the direct one on domain[0], even though user[0] is # in group[0]. 
- {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0, 'group': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'domain': 0, 'group': 1}}]} - ] + { + 'params': {'user': 0, 'project': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 2, + 'project': 0, + 'indirect': {'domain': 0, 'group': 1}, + }, + { + 'user': 0, + 'role': 3, + 'project': 0, + 'indirect': {'domain': 0, 'group': 1}, + }, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3067,17 +3500,20 @@ class InheritanceTests(AssignmentTestHelperMixin): # Create 2 grants, one on a project and one inherited grant # on the domain PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain['id'], - role_id=self.role_admin['id'], inherited_to_projects=True + user_id=user1['id'], + domain_id=domain['id'], + role_id=self.role_admin['id'], + inherited_to_projects=True, ) # Should get back all three projects, one by virtue of the direct # grant, plus both projects in the domain - user_projects = ( - PROVIDERS.assignment_api.list_projects_for_user(user1['id']) + user_projects = PROVIDERS.assignment_api.list_projects_for_user( + user1['id'] ) self.assertEqual(3, len(user_projects)) @@ -3092,23 +3528,42 @@ class InheritanceTests(AssignmentTestHelperMixin): test_plan = { # A domain with 1 project, plus a second domain with 2 projects, # as well as a user. Also, create 2 roles. 
- 'entities': {'domains': [{'projects': 1}, - {'users': 1, 'projects': 2}], - 'roles': 2}, - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'domain': 1, - 'inherited_to_projects': True}], + 'entities': { + 'domains': [{'projects': 1}, {'users': 1, 'projects': 2}], + 'roles': 2, + }, + 'assignments': [ + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 1, + 'domain': 1, + 'inherited_to_projects': True, + }, + ], 'tests': [ # List all effective assignments for user[0] # Should get one direct role plus one inherited role for each # project in domain - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'domain': 1}}]} - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 1, + 'project': 1, + 'indirect': {'domain': 1}, + }, + { + 'user': 0, + 'role': 1, + 'project': 2, + 'indirect': {'domain': 1}, + }, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3128,13 +3583,15 @@ class InheritanceTests(AssignmentTestHelperMixin): """ # Enable OS-INHERIT extension root_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) root_project = PROVIDERS.resource_api.create_project( root_project['id'], root_project ) leaf_project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id, - parent_id=root_project['id']) + parent_id=root_project['id'], + ) leaf_project = PROVIDERS.resource_api.create_project( leaf_project['id'], leaf_project ) @@ -3144,18 +3601,21 @@ class InheritanceTests(AssignmentTestHelperMixin): # Grant inherited user role PROVIDERS.assignment_api.create_grant( - user_id=user['id'], project_id=root_project['id'], - role_id=self.role_admin['id'], inherited_to_projects=True + 
user_id=user['id'], + project_id=root_project['id'], + role_id=self.role_admin['id'], + inherited_to_projects=True, ) # Grant non-inherited user role PROVIDERS.assignment_api.create_grant( - user_id=user['id'], project_id=root_project['id'], - role_id=self.role_member['id'] + user_id=user['id'], + project_id=root_project['id'], + role_id=self.role_member['id'], ) # Should get back both projects: because the direct role assignment for # the root project and inherited role assignment for leaf project - user_projects = ( - PROVIDERS.assignment_api.list_projects_for_user(user['id']) + user_projects = PROVIDERS.assignment_api.list_projects_for_user( + user['id'] ) self.assertEqual(2, len(user_projects)) self.assertIn(root_project, user_projects) @@ -3173,21 +3633,39 @@ class InheritanceTests(AssignmentTestHelperMixin): # A domain with a project and sub-project, plus a user. # Also, create 2 roles. 'entities': { - 'domains': {'id': CONF.identity.default_domain_id, 'users': 1, - 'projects': {'project': 1}}, - 'roles': 2}, + 'domains': { + 'id': CONF.identity.default_domain_id, + 'users': 1, + 'projects': {'project': 1}, + }, + 'roles': 2, + }, # A direct role and an inherited role on the parent - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 0, - 'inherited_to_projects': True}], + 'assignments': [ + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 1, + 'project': 0, + 'inherited_to_projects': True, + }, + ], 'tests': [ # List all effective assignments for user[0] - should get back # one direct role plus one inherited role. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'project': 0}}]} - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 1, + 'project': 1, + 'indirect': {'project': 0}, + }, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3231,25 +3709,31 @@ class InheritanceTests(AssignmentTestHelperMixin): # - one inherited user grant on domain # - one inherited group grant on domain2 PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=project3['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=project3['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], domain_id=domain['id'], - role_id=self.role_admin['id'], inherited_to_projects=True + user_id=user1['id'], + domain_id=domain['id'], + role_id=self.role_admin['id'], + inherited_to_projects=True, ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], domain_id=domain2['id'], - role_id=self.role_admin['id'], inherited_to_projects=True + group_id=group1['id'], + domain_id=domain2['id'], + role_id=self.role_admin['id'], + inherited_to_projects=True, ) # Should get back all five projects, but without a duplicate for # project3 (since it has both a direct user role and an inherited role) - user_projects = ( - PROVIDERS.assignment_api.list_projects_for_user(user1['id']) + user_projects = PROVIDERS.assignment_api.list_projects_for_user( + user1['id'] ) self.assertEqual(5, len(user_projects)) @@ -3265,33 +3749,67 @@ class InheritanceTests(AssignmentTestHelperMixin): # A domain with a 1 project, plus a second domain 
with 2 projects, # as well as a user & group and a 3rd domain with 2 projects. # Also, created 2 roles. - 'entities': {'domains': [{'projects': 1}, - {'users': 1, 'groups': 1, 'projects': 2}, - {'projects': 2}], - 'roles': 2}, + 'entities': { + 'domains': [ + {'projects': 1}, + {'users': 1, 'groups': 1, 'projects': 2}, + {'projects': 2}, + ], + 'roles': 2, + }, 'group_memberships': [{'group': 0, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 0, 'project': 3}, - {'user': 0, 'role': 1, 'domain': 1, - 'inherited_to_projects': True}, - {'user': 0, 'role': 1, 'domain': 2, - 'inherited_to_projects': True}], + 'assignments': [ + {'user': 0, 'role': 0, 'project': 0}, + {'user': 0, 'role': 0, 'project': 3}, + { + 'user': 0, + 'role': 1, + 'domain': 1, + 'inherited_to_projects': True, + }, + { + 'user': 0, + 'role': 1, + 'domain': 2, + 'inherited_to_projects': True, + }, + ], 'tests': [ # List all effective assignments for user[0] # Should get back both direct roles plus roles on both projects # from each domain. Duplicates should not be filtered out. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 3}, - {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'domain': 2}}, - {'user': 0, 'role': 1, 'project': 4, - 'indirect': {'domain': 2}}]} - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'project': 3}, + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 1, + 'project': 1, + 'indirect': {'domain': 1}, + }, + { + 'user': 0, + 'role': 1, + 'project': 2, + 'indirect': {'domain': 1}, + }, + { + 'user': 0, + 'role': 1, + 'project': 3, + 'indirect': {'domain': 2}, + }, + { + 'user': 0, + 'role': 1, + 'project': 4, + 'indirect': {'domain': 2}, + }, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3310,13 +3828,15 @@ class InheritanceTests(AssignmentTestHelperMixin): """ root_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) root_project = PROVIDERS.resource_api.create_project( root_project['id'], root_project ) leaf_project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id, - parent_id=root_project['id']) + parent_id=root_project['id'], + ) leaf_project = PROVIDERS.resource_api.create_project( leaf_project['id'], leaf_project ) @@ -3330,13 +3850,16 @@ class InheritanceTests(AssignmentTestHelperMixin): # Grant inherited group role PROVIDERS.assignment_api.create_grant( - group_id=group['id'], project_id=root_project['id'], - role_id=self.role_admin['id'], inherited_to_projects=True + group_id=group['id'], + project_id=root_project['id'], + role_id=self.role_admin['id'], + inherited_to_projects=True, ) # Grant non-inherited group role PROVIDERS.assignment_api.create_grant( - group_id=group['id'], project_id=root_project['id'], - 
role_id=self.role_member['id'] + group_id=group['id'], + project_id=root_project['id'], + role_id=self.role_member['id'], ) # Should get back both projects: because the direct role assignment for # the root project and inherited role assignment for leaf project @@ -3359,24 +3882,46 @@ class InheritanceTests(AssignmentTestHelperMixin): # A domain with a project and sub-project, plus a user. # Also, create 2 roles. 'entities': { - 'domains': {'id': CONF.identity.default_domain_id, 'users': 1, - 'groups': 1, - 'projects': {'project': 1}}, - 'roles': 2}, + 'domains': { + 'id': CONF.identity.default_domain_id, + 'users': 1, + 'groups': 1, + 'projects': {'project': 1}, + }, + 'roles': 2, + }, 'group_memberships': [{'group': 0, 'users': [0]}], # A direct role and an inherited role on the parent - 'assignments': [{'group': 0, 'role': 0, 'project': 0}, - {'group': 0, 'role': 1, 'project': 0, - 'inherited_to_projects': True}], + 'assignments': [ + {'group': 0, 'role': 0, 'project': 0}, + { + 'group': 0, + 'role': 1, + 'project': 0, + 'inherited_to_projects': True, + }, + ], 'tests': [ # List all effective assignments for user[0] - should get back # one direct role plus one inherited role. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'group': 0, 'project': 0}}]} - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + { + 'user': 0, + 'role': 0, + 'project': 0, + 'indirect': {'group': 0}, + }, + { + 'user': 0, + 'role': 1, + 'project': 1, + 'indirect': {'group': 0, 'project': 0}, + }, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3398,32 +3943,45 @@ class InheritanceTests(AssignmentTestHelperMixin): # Also, create 1 user and 4 roles. 
'entities': { 'domains': { - 'projects': {'project': [{'project': 2}, - {'project': 2}]}, - 'users': 1}, - 'roles': 4}, + 'projects': {'project': [{'project': 2}, {'project': 2}]}, + 'users': 1, + }, + 'roles': 4, + }, 'assignments': [ # Direct assignment to projects 1 and 2 {'user': 0, 'role': 0, 'project': 1}, {'user': 0, 'role': 1, 'project': 2}, # Also an inherited assignment on project 1 - {'user': 0, 'role': 2, 'project': 1, - 'inherited_to_projects': True}, + { + 'user': 0, + 'role': 2, + 'project': 1, + 'inherited_to_projects': True, + }, # ...and two spoiler assignments, one to the root and one # to project 4 {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 3, 'project': 4}], + {'user': 0, 'role': 3, 'project': 4}, + ], 'tests': [ # List all assignments for project 1 and its subtree. - {'params': {'project': 1, 'include_subtree': True}, - 'results': [ - # Only the actual assignments should be returned, no - # expansion of inherited assignments - {'user': 0, 'role': 0, 'project': 1}, - {'user': 0, 'role': 1, 'project': 2}, - {'user': 0, 'role': 2, 'project': 1, - 'inherited_to_projects': 'projects'}]} - ] + { + 'params': {'project': 1, 'include_subtree': True}, + 'results': [ + # Only the actual assignments should be returned, no + # expansion of inherited assignments + {'user': 0, 'role': 0, 'project': 1}, + {'user': 0, 'role': 1, 'project': 2}, + { + 'user': 0, + 'role': 2, + 'project': 1, + 'inherited_to_projects': 'projects', + }, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3444,34 +4002,54 @@ class InheritanceTests(AssignmentTestHelperMixin): # Also, create 1 user and 4 roles. 
'entities': { 'domains': { - 'projects': {'project': [{'project': 2}, - {'project': 2}]}, - 'users': 1}, - 'roles': 4}, + 'projects': {'project': [{'project': 2}, {'project': 2}]}, + 'users': 1, + }, + 'roles': 4, + }, 'assignments': [ # An inherited assignment on project 1 - {'user': 0, 'role': 1, 'project': 1, - 'inherited_to_projects': True}, + { + 'user': 0, + 'role': 1, + 'project': 1, + 'inherited_to_projects': True, + }, # A direct assignment to project 2 {'user': 0, 'role': 2, 'project': 2}, # ...and two spoiler assignments, one to the root and one # to project 4 {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 3, 'project': 4}], + {'user': 0, 'role': 3, 'project': 4}, + ], 'tests': [ # List all effective assignments for project 1 and its subtree. - {'params': {'project': 1, 'effective': True, - 'include_subtree': True}, - 'results': [ - # The inherited assignment on project 1 should appear only - # on its children - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'project': 1}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'project': 1}}, - # And finally the direct assignment on project 2 - {'user': 0, 'role': 2, 'project': 2}]} - ] + { + 'params': { + 'project': 1, + 'effective': True, + 'include_subtree': True, + }, + 'results': [ + # The inherited assignment on project 1 should appear only + # on its children + { + 'user': 0, + 'role': 1, + 'project': 2, + 'indirect': {'project': 1}, + }, + { + 'user': 0, + 'role': 1, + 'project': 3, + 'indirect': {'project': 1}, + }, + # And finally the direct assignment on project 2 + {'user': 0, 'role': 2, 'project': 2}, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3499,59 +4077,114 @@ class InheritanceTests(AssignmentTestHelperMixin): # Also, create 2 users, 1 group and 4 roles. 
'entities': { 'domains': { - 'projects': {'project': [{'project': 2}, - {'project': 2}]}, - 'users': 2, 'groups': 1}, - 'roles': 4}, + 'projects': {'project': [{'project': 2}, {'project': 2}]}, + 'users': 2, + 'groups': 1, + }, + 'roles': 4, + }, # Both users are part of the same group 'group_memberships': [{'group': 0, 'users': [0, 1]}], # We are going to ask for listing of assignment on project 1 and # it's subtree. So first we'll add two inherited assignments above # this (one user and one for a group that contains this user). - 'assignments': [{'user': 0, 'role': 0, 'project': 0, - 'inherited_to_projects': True}, - {'group': 0, 'role': 1, 'project': 0, - 'inherited_to_projects': True}, - # Now an inherited assignment on project 1 itself, - # which should ONLY show up on its children - {'user': 0, 'role': 2, 'project': 1, - 'inherited_to_projects': True}, - # ...and a direct assignment on one of those - # children - {'user': 0, 'role': 3, 'project': 2}, - # The rest are spoiler assignments - {'user': 0, 'role': 2, 'project': 5}, - {'user': 0, 'role': 3, 'project': 4}], + 'assignments': [ + { + 'user': 0, + 'role': 0, + 'project': 0, + 'inherited_to_projects': True, + }, + { + 'group': 0, + 'role': 1, + 'project': 0, + 'inherited_to_projects': True, + }, + # Now an inherited assignment on project 1 itself, + # which should ONLY show up on its children + { + 'user': 0, + 'role': 2, + 'project': 1, + 'inherited_to_projects': True, + }, + # ...and a direct assignment on one of those + # children + {'user': 0, 'role': 3, 'project': 2}, + # The rest are spoiler assignments + {'user': 0, 'role': 2, 'project': 5}, + {'user': 0, 'role': 3, 'project': 4}, + ], 'tests': [ # List all effective assignments for project 1 and its subtree. 
- {'params': {'project': 1, 'user': 0, 'effective': True, - 'include_subtree': True}, - 'results': [ - # First, we should see the inherited user assignment from - # project 0 on all projects in the subtree - {'user': 0, 'role': 0, 'project': 1, - 'indirect': {'project': 0}}, - {'user': 0, 'role': 0, 'project': 2, - 'indirect': {'project': 0}}, - {'user': 0, 'role': 0, 'project': 3, - 'indirect': {'project': 0}}, - # Also the inherited group assignment from project 0 on - # the subtree - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'project': 0, 'group': 0}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'project': 0, 'group': 0}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'project': 0, 'group': 0}}, - # The inherited assignment on project 1 should appear only - # on its children - {'user': 0, 'role': 2, 'project': 2, - 'indirect': {'project': 1}}, - {'user': 0, 'role': 2, 'project': 3, - 'indirect': {'project': 1}}, - # And finally the direct assignment on project 2 - {'user': 0, 'role': 3, 'project': 2}]} - ] + { + 'params': { + 'project': 1, + 'user': 0, + 'effective': True, + 'include_subtree': True, + }, + 'results': [ + # First, we should see the inherited user assignment from + # project 0 on all projects in the subtree + { + 'user': 0, + 'role': 0, + 'project': 1, + 'indirect': {'project': 0}, + }, + { + 'user': 0, + 'role': 0, + 'project': 2, + 'indirect': {'project': 0}, + }, + { + 'user': 0, + 'role': 0, + 'project': 3, + 'indirect': {'project': 0}, + }, + # Also the inherited group assignment from project 0 on + # the subtree + { + 'user': 0, + 'role': 1, + 'project': 1, + 'indirect': {'project': 0, 'group': 0}, + }, + { + 'user': 0, + 'role': 1, + 'project': 2, + 'indirect': {'project': 0, 'group': 0}, + }, + { + 'user': 0, + 'role': 1, + 'project': 3, + 'indirect': {'project': 0, 'group': 0}, + }, + # The inherited assignment on project 1 should appear only + # on its children + { + 'user': 0, + 'role': 2, + 'project': 2, + 
'indirect': {'project': 1}, + }, + { + 'user': 0, + 'role': 2, + 'project': 3, + 'indirect': {'project': 1}, + }, + # And finally the direct assignment on project 2 + {'user': 0, 'role': 3, 'project': 2}, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3572,37 +4205,61 @@ class InheritanceTests(AssignmentTestHelperMixin): # Also, create 1 user and 4 roles. 'entities': { 'domains': { - 'projects': {'project': [{'project': 2}, - {'project': 2}]}, - 'users': 1}, - 'roles': 4}, + 'projects': {'project': [{'project': 2}, {'project': 2}]}, + 'users': 1, + }, + 'roles': 4, + }, 'assignments': [ # An inherited assignment on the domain (which should be # applied to all the projects) - {'user': 0, 'role': 1, 'domain': 0, - 'inherited_to_projects': True}, + { + 'user': 0, + 'role': 1, + 'domain': 0, + 'inherited_to_projects': True, + }, # A direct assignment to project 2 {'user': 0, 'role': 2, 'project': 2}, # ...and two spoiler assignments, one to the root and one # to project 4 {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 3, 'project': 4}], + {'user': 0, 'role': 3, 'project': 4}, + ], 'tests': [ # List all effective assignments for project 1 and its subtree. 
- {'params': {'project': 1, 'effective': True, - 'include_subtree': True}, - 'results': [ - # The inherited assignment from the domain should appear - # only on the part of the subtree we are interested in - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'domain': 0}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'domain': 0}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'domain': 0}}, - # And finally the direct assignment on project 2 - {'user': 0, 'role': 2, 'project': 2}]} - ] + { + 'params': { + 'project': 1, + 'effective': True, + 'include_subtree': True, + }, + 'results': [ + # The inherited assignment from the domain should appear + # only on the part of the subtree we are interested in + { + 'user': 0, + 'role': 1, + 'project': 1, + 'indirect': {'domain': 0}, + }, + { + 'user': 0, + 'role': 1, + 'project': 2, + 'indirect': {'domain': 0}, + }, + { + 'user': 0, + 'role': 1, + 'project': 3, + 'indirect': {'domain': 0}, + }, + # And finally the direct assignment on project 2 + {'user': 0, 'role': 2, 'project': 2}, + ], + } + ], } self.execute_assignment_plan(test_plan) @@ -3612,29 +4269,46 @@ class InheritanceTests(AssignmentTestHelperMixin): # A domain with a project and sub-project, plus four users, # two groups, as well as 4 roles. 
'entities': { - 'domains': {'id': CONF.identity.default_domain_id, 'users': 4, - 'groups': 2, - 'projects': {'project': 1}}, - 'roles': 4}, + 'domains': { + 'id': CONF.identity.default_domain_id, + 'users': 4, + 'groups': 2, + 'projects': {'project': 1}, + }, + 'roles': 4, + }, # Each group has a unique user member - 'group_memberships': [{'group': 0, 'users': [1]}, - {'group': 1, 'users': [3]}], + 'group_memberships': [ + {'group': 0, 'users': [1]}, + {'group': 1, 'users': [3]}, + ], # Set up assignments so that there should end up with four # effective assignments on project 1 - one direct, one due to # group membership and one user assignment inherited from the # parent and one group assignment inherited from the parent. - 'assignments': [{'user': 0, 'role': 0, 'project': 1}, - {'group': 0, 'role': 1, 'project': 1}, - {'user': 2, 'role': 2, 'project': 0, - 'inherited_to_projects': True}, - {'group': 1, 'role': 3, 'project': 0, - 'inherited_to_projects': True}], + 'assignments': [ + {'user': 0, 'role': 0, 'project': 1}, + {'group': 0, 'role': 1, 'project': 1}, + { + 'user': 2, + 'role': 2, + 'project': 0, + 'inherited_to_projects': True, + }, + { + 'group': 1, + 'role': 3, + 'project': 0, + 'inherited_to_projects': True, + }, + ], } # Use assignment plan helper to create all the entities and # assignments - then we'll run our own tests using the data test_data = self.execute_assignment_plan(test_plan) user_ids = PROVIDERS.assignment_api.list_user_ids_for_project( - test_data['projects'][1]['id']) + test_data['projects'][1]['id'] + ) self.assertThat(user_ids, matchers.HasLength(4)) for x in range(0, 4): self.assertIn(test_data['users'][x]['id'], user_ids) @@ -3644,39 +4318,68 @@ class InheritanceTests(AssignmentTestHelperMixin): test_plan = { # A domain with 3 users, 3 groups, 3 projects, a second domain, # plus 3 roles. 
- 'entities': {'domains': [{'users': 3, 'groups': 3, 'projects': 3}, - 1], - 'roles': 3}, + 'entities': { + 'domains': [{'users': 3, 'groups': 3, 'projects': 3}, 1], + 'roles': 3, + }, # Users 0 & 1 are in the group 0, User 0 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], + 'group_memberships': [ + {'group': 0, 'users': [0, 1]}, + {'group': 1, 'users': [0]}, + ], # Spread the assignments around - we want to be able to show that # if sourced by group, assignments from other sources are excluded - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 1}, - {'group': 1, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'group': 1, 'role': 2, 'project': 1}, - {'user': 2, 'role': 1, 'project': 1, - 'inherited_to_projects': True}, - {'group': 2, 'role': 2, 'project': 2} - ], + 'assignments': [ + {'user': 0, 'role': 0, 'domain': 0}, + {'group': 0, 'role': 1, 'domain': 1}, + { + 'group': 1, + 'role': 2, + 'domain': 0, + 'inherited_to_projects': True, + }, + {'group': 1, 'role': 2, 'project': 1}, + { + 'user': 2, + 'role': 1, + 'project': 1, + 'inherited_to_projects': True, + }, + {'group': 2, 'role': 2, 'project': 2}, + ], 'tests': [ # List all effective assignments sourced from groups 0 and 1. # We should see the inherited group assigned on the 3 projects # from domain 0, as well as the direct assignments. 
- {'params': {'source_from_group_ids': [0, 1], - 'effective': True}, - 'results': [{'group': 0, 'role': 1, 'domain': 1}, - {'group': 1, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}, - {'group': 1, 'role': 2, 'project': 1, - 'indirect': {'domain': 0}}, - {'group': 1, 'role': 2, 'project': 2, - 'indirect': {'domain': 0}}, - {'group': 1, 'role': 2, 'project': 1} - ]}, - ] + { + 'params': { + 'source_from_group_ids': [0, 1], + 'effective': True, + }, + 'results': [ + {'group': 0, 'role': 1, 'domain': 1}, + { + 'group': 1, + 'role': 2, + 'project': 0, + 'indirect': {'domain': 0}, + }, + { + 'group': 1, + 'role': 2, + 'project': 1, + 'indirect': {'domain': 0}, + }, + { + 'group': 1, + 'role': 2, + 'project': 2, + 'indirect': {'domain': 0}, + }, + {'group': 1, 'role': 2, 'project': 1}, + ], + }, + ], } self.execute_assignment_plan(test_plan) @@ -3692,121 +4395,195 @@ class ImpliedRoleTests(AssignmentTestHelperMixin): ) PROVIDERS.role_api.create_implied_role( - prior_role_ref['id'], - implied_role_ref['id']) + prior_role_ref['id'], implied_role_ref['id'] + ) implied_role = PROVIDERS.role_api.get_implied_role( - prior_role_ref['id'], - implied_role_ref['id']) + prior_role_ref['id'], implied_role_ref['id'] + ) expected_implied_role_ref = { 'prior_role_id': prior_role_ref['id'], - 'implied_role_id': implied_role_ref['id']} + 'implied_role_id': implied_role_ref['id'], + } self.assertLessEqual( - expected_implied_role_ref.items(), - implied_role.items()) + expected_implied_role_ref.items(), implied_role.items() + ) PROVIDERS.role_api.delete_implied_role( - prior_role_ref['id'], - implied_role_ref['id']) - self.assertRaises(exception.ImpliedRoleNotFound, - PROVIDERS.role_api.get_implied_role, - uuid.uuid4().hex, - uuid.uuid4().hex) + prior_role_ref['id'], implied_role_ref['id'] + ) + self.assertRaises( + exception.ImpliedRoleNotFound, + PROVIDERS.role_api.get_implied_role, + uuid.uuid4().hex, + uuid.uuid4().hex, + ) def test_delete_implied_role_returns_not_found(self): 
- self.assertRaises(exception.ImpliedRoleNotFound, - PROVIDERS.role_api.delete_implied_role, - uuid.uuid4().hex, - uuid.uuid4().hex) + self.assertRaises( + exception.ImpliedRoleNotFound, + PROVIDERS.role_api.delete_implied_role, + uuid.uuid4().hex, + uuid.uuid4().hex, + ) def test_role_assignments_simple_tree_of_implied_roles(self): """Test that implied roles are expanded out.""" test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 4}, + 'entities': {'domains': {'users': 1, 'projects': 1}, 'roles': 4}, # Three level tree of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': 1}, - {'role': 1, 'implied_roles': [2, 3]}], + 'implied_roles': [ + {'role': 0, 'implied_roles': 1}, + {'role': 1, 'implied_roles': [2, 3]}, + ], 'assignments': [{'user': 0, 'role': 0, 'project': 0}], 'tests': [ # List all direct assignments for user[0], this should just # show the one top level role assignment - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'project': 0}]}, + { + 'params': {'user': 0}, + 'results': [{'user': 0, 'role': 0, 'project': 0}], + }, # Listing in effective mode should show the implied roles # expanded out - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}]}, - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 1, + 'project': 0, + 'indirect': {'role': 0}, + }, + { + 'user': 0, + 'role': 2, + 'project': 0, + 'indirect': {'role': 1}, + }, + { + 'user': 0, + 'role': 3, + 'project': 0, + 'indirect': {'role': 1}, + }, + ], + }, + ], } self.execute_assignment_plan(test_plan) def test_circular_inferences(self): """Test that implied roles are expanded out.""" test_plan = { - 'entities': {'domains': 
{'users': 1, 'projects': 1}, - 'roles': 4}, + 'entities': {'domains': {'users': 1, 'projects': 1}, 'roles': 4}, # Three level tree of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': [1]}, - {'role': 1, 'implied_roles': [2, 3]}, - {'role': 3, 'implied_roles': [0]}], + 'implied_roles': [ + {'role': 0, 'implied_roles': [1]}, + {'role': 1, 'implied_roles': [2, 3]}, + {'role': 3, 'implied_roles': [0]}, + ], 'assignments': [{'user': 0, 'role': 0, 'project': 0}], 'tests': [ # List all direct assignments for user[0], this should just # show the one top level role assignment - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'project': 0}]}, + { + 'params': {'user': 0}, + 'results': [{'user': 0, 'role': 0, 'project': 0}], + }, # Listing in effective mode should show the implied roles # expanded out - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 0, 'project': 0, - 'indirect': {'role': 3}}, - {'user': 0, 'role': 1, 'project': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}]}, - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'project': 0}, + { + 'user': 0, + 'role': 0, + 'project': 0, + 'indirect': {'role': 3}, + }, + { + 'user': 0, + 'role': 1, + 'project': 0, + 'indirect': {'role': 0}, + }, + { + 'user': 0, + 'role': 2, + 'project': 0, + 'indirect': {'role': 1}, + }, + { + 'user': 0, + 'role': 3, + 'project': 0, + 'indirect': {'role': 1}, + }, + ], + }, + ], } self.execute_assignment_plan(test_plan) def test_role_assignments_directed_graph_of_implied_roles(self): """Test that a role can have multiple, different prior roles.""" test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 6}, + 'entities': {'domains': {'users': 1, 'projects': 1}, 'roles': 6}, # Three level tree of implied roles, where one 
of the roles at the # bottom is implied by more than one top level role - 'implied_roles': [{'role': 0, 'implied_roles': [1, 2]}, - {'role': 1, 'implied_roles': [3, 4]}, - {'role': 5, 'implied_roles': 4}], + 'implied_roles': [ + {'role': 0, 'implied_roles': [1, 2]}, + {'role': 1, 'implied_roles': [3, 4]}, + {'role': 5, 'implied_roles': 4}, + ], # The user gets both top level roles - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 5, 'project': 0}], + 'assignments': [ + {'user': 0, 'role': 0, 'project': 0}, + {'user': 0, 'role': 5, 'project': 0}, + ], 'tests': [ # The implied roles should be expanded out and there should be # two entries for the role that had two different prior roles. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 5, 'project': 0}, - {'user': 0, 'role': 1, 'project': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 4, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 4, 'project': 0, - 'indirect': {'role': 5}}]}, - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'project': 0}, + {'user': 0, 'role': 5, 'project': 0}, + { + 'user': 0, + 'role': 1, + 'project': 0, + 'indirect': {'role': 0}, + }, + { + 'user': 0, + 'role': 2, + 'project': 0, + 'indirect': {'role': 0}, + }, + { + 'user': 0, + 'role': 3, + 'project': 0, + 'indirect': {'role': 1}, + }, + { + 'user': 0, + 'role': 4, + 'project': 0, + 'indirect': {'role': 1}, + }, + { + 'user': 0, + 'role': 4, + 'project': 0, + 'indirect': {'role': 5}, + }, + ], + }, + ], } test_data = self.execute_assignment_plan(test_plan) @@ -3814,7 +4591,8 @@ class ImpliedRoleTests(AssignmentTestHelperMixin): # the above by calling get_roles_for_user_and_project(), which should # list the role_ids, yet remove any duplicates 
role_ids = PROVIDERS.assignment_api.get_roles_for_user_and_project( - test_data['users'][0]['id'], test_data['projects'][0]['id']) + test_data['users'][0]['id'], test_data['projects'][0]['id'] + ) # We should see 6 entries, not 7, since role index 5 appeared twice in # the answer from list_role_assignments self.assertThat(role_ids, matchers.HasLength(6)) @@ -3824,109 +4602,182 @@ class ImpliedRoleTests(AssignmentTestHelperMixin): def test_role_assignments_implied_roles_filtered_by_role(self): """Test that you can filter by role even if roles are implied.""" test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 2}, - 'roles': 4}, + 'entities': {'domains': {'users': 1, 'projects': 2}, 'roles': 4}, # Three level tree of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': 1}, - {'role': 1, 'implied_roles': [2, 3]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 3, 'project': 1}], + 'implied_roles': [ + {'role': 0, 'implied_roles': 1}, + {'role': 1, 'implied_roles': [2, 3]}, + ], + 'assignments': [ + {'user': 0, 'role': 0, 'project': 0}, + {'user': 0, 'role': 3, 'project': 1}, + ], 'tests': [ # List effective roles filtering by one of the implied roles, # showing that the filter was implied post expansion of # implied roles (and that non implied roles are included in # the filter - {'params': {'role': 3, 'effective': True}, - 'results': [{'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'project': 1}]}, - ] + { + 'params': {'role': 3, 'effective': True}, + 'results': [ + { + 'user': 0, + 'role': 3, + 'project': 0, + 'indirect': {'role': 1}, + }, + {'user': 0, 'role': 3, 'project': 1}, + ], + }, + ], } self.execute_assignment_plan(test_plan) def test_role_assignments_simple_tree_of_implied_roles_on_domain(self): """Test that implied roles are expanded out when placed on a domain.""" test_plan = { - 'entities': {'domains': {'users': 1}, - 'roles': 4}, + 'entities': {'domains': 
{'users': 1}, 'roles': 4}, # Three level tree of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': 1}, - {'role': 1, 'implied_roles': [2, 3]}], + 'implied_roles': [ + {'role': 0, 'implied_roles': 1}, + {'role': 1, 'implied_roles': [2, 3]}, + ], 'assignments': [{'user': 0, 'role': 0, 'domain': 0}], 'tests': [ # List all direct assignments for user[0], this should just # show the one top level role assignment - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}]}, + { + 'params': {'user': 0}, + 'results': [{'user': 0, 'role': 0, 'domain': 0}], + }, # Listing in effective mode should how the implied roles # expanded out - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'domain': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 2, 'domain': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'domain': 0, - 'indirect': {'role': 1}}]}, - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + {'user': 0, 'role': 0, 'domain': 0}, + { + 'user': 0, + 'role': 1, + 'domain': 0, + 'indirect': {'role': 0}, + }, + { + 'user': 0, + 'role': 2, + 'domain': 0, + 'indirect': {'role': 1}, + }, + { + 'user': 0, + 'role': 3, + 'domain': 0, + 'indirect': {'role': 1}, + }, + ], + }, + ], } self.execute_assignment_plan(test_plan) def test_role_assignments_inherited_implied_roles(self): """Test that you can intermix inherited and implied roles.""" test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 4}, + 'entities': {'domains': {'users': 1, 'projects': 1}, 'roles': 4}, # Simply one level of implied roles 'implied_roles': [{'role': 0, 'implied_roles': 1}], # Assign to top level role as an inherited assignment to the # domain - 'assignments': [{'user': 0, 'role': 0, 'domain': 0, - 'inherited_to_projects': True}], + 'assignments': [ + { + 'user': 0, + 'role': 0, + 'domain': 0, + 'inherited_to_projects': True, + } + ], 'tests': [ # List all 
direct assignments for user[0], this should just # show the one top level role assignment - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'domain': 0, - 'inherited_to_projects': 'projects'}]}, + { + 'params': {'user': 0}, + 'results': [ + { + 'user': 0, + 'role': 0, + 'domain': 0, + 'inherited_to_projects': 'projects', + } + ], + }, # List in effective mode - we should only see the initial and # implied role on the project (since inherited roles are not # active on their anchor point). - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0, - 'indirect': {'domain': 0}}, - {'user': 0, 'role': 1, 'project': 0, - 'indirect': {'domain': 0, 'role': 0}}]}, - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + { + 'user': 0, + 'role': 0, + 'project': 0, + 'indirect': {'domain': 0}, + }, + { + 'user': 0, + 'role': 1, + 'project': 0, + 'indirect': {'domain': 0, 'role': 0}, + }, + ], + }, + ], } self.execute_assignment_plan(test_plan) def test_role_assignments_domain_specific_with_implied_roles(self): test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1, 'roles': 2}, - 'roles': 2}, + 'entities': { + 'domains': {'users': 1, 'projects': 1, 'roles': 2}, + 'roles': 2, + }, # Two level tree of implied roles, with the top and 1st level being # domain specific roles, and the bottom level being inferred global # roles. - 'implied_roles': [{'role': 0, 'implied_roles': [1]}, - {'role': 1, 'implied_roles': [2, 3]}], + 'implied_roles': [ + {'role': 0, 'implied_roles': [1]}, + {'role': 1, 'implied_roles': [2, 3]}, + ], 'assignments': [{'user': 0, 'role': 0, 'project': 0}], 'tests': [ # List all direct assignments for user[0], this should just # show the one top level role assignment, even though this is a # domain specific role (since we are in non-effective mode and # we show any direct role assignment in that mode). 
- {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'project': 0}]}, + { + 'params': {'user': 0}, + 'results': [{'user': 0, 'role': 0, 'project': 0}], + }, # Now the effective ones - so the implied roles should be # expanded out, as well as any domain specific roles should be # removed. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 2, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}]}, - ] + { + 'params': {'user': 0, 'effective': True}, + 'results': [ + { + 'user': 0, + 'role': 2, + 'project': 0, + 'indirect': {'role': 1}, + }, + { + 'user': 0, + 'role': 3, + 'project': 0, + 'indirect': {'role': 1}, + }, + ], + }, + ], } self.execute_assignment_plan(test_plan) @@ -3979,7 +4830,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.check_system_grant_for_user, user_id, - role['id'] + role['id'], ) PROVIDERS.assignment_api.create_system_grant_for_user( @@ -4018,7 +4869,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.check_system_grant_for_user, user_id, - uuid.uuid4().hex + uuid.uuid4().hex, ) def test_check_system_grant_for_user_with_invalid_user_fails(self): @@ -4028,7 +4879,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.check_system_grant_for_user, uuid.uuid4().hex, - role['id'] + role['id'], ) def test_delete_system_grant_for_user_with_invalid_role_fails(self): @@ -4043,7 +4894,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.delete_system_grant_for_user, user_id, - uuid.uuid4().hex + uuid.uuid4().hex, ) def test_delete_system_grant_for_user_with_invalid_user_fails(self): @@ -4058,7 +4909,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, 
PROVIDERS.assignment_api.delete_system_grant_for_user, uuid.uuid4().hex, - role['id'] + role['id'], ) def test_list_system_grants_for_user_returns_empty_list(self): @@ -4079,7 +4930,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.ValidationError, PROVIDERS.assignment_api.create_system_grant_for_user, user_id, - role['id'] + role['id'], ) def test_create_system_grant_for_group(self): @@ -4129,7 +4980,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.check_system_grant_for_group, group_id, - role['id'] + role['id'], ) PROVIDERS.assignment_api.create_system_grant_for_group( @@ -4168,7 +5019,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.check_system_grant_for_group, group_id, - uuid.uuid4().hex + uuid.uuid4().hex, ) def test_check_system_grant_for_group_with_invalid_group_fails(self): @@ -4178,7 +5029,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.check_system_grant_for_group, uuid.uuid4().hex, - role['id'] + role['id'], ) def test_delete_system_grant_for_group_with_invalid_role_fails(self): @@ -4193,7 +5044,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.delete_system_grant_for_group, group_id, - uuid.uuid4().hex + uuid.uuid4().hex, ) def test_delete_system_grant_for_group_with_invalid_group_fails(self): @@ -4208,7 +5059,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.RoleAssignmentNotFound, PROVIDERS.assignment_api.delete_system_grant_for_group, uuid.uuid4().hex, - role['id'] + role['id'], ) def test_list_system_grants_for_group_returns_empty_list(self): @@ -4229,7 +5080,7 @@ class SystemAssignmentTests(AssignmentTestHelperMixin): exception.ValidationError, PROVIDERS.assignment_api.create_system_grant_for_group, group_id, - role['id'] + 
role['id'], ) def test_delete_role_with_system_assignments(self): diff --git a/keystone/tests/unit/assignment/test_core.py b/keystone/tests/unit/assignment/test_core.py index 7aee3e42ac..b053faae8f 100644 --- a/keystone/tests/unit/assignment/test_core.py +++ b/keystone/tests/unit/assignment/test_core.py @@ -27,14 +27,18 @@ PROVIDERS = provider_api.ProviderAPIs class RoleTests(object): def test_get_role_returns_not_found(self): - self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.get_role, - uuid.uuid4().hex) + self.assertRaises( + exception.RoleNotFound, + PROVIDERS.role_api.get_role, + uuid.uuid4().hex, + ) def test_get_unique_role_by_name_returns_not_found(self): - self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.get_unique_role_by_name, - uuid.uuid4().hex) + self.assertRaises( + exception.RoleNotFound, + PROVIDERS.role_api.get_unique_role_by_name, + uuid.uuid4().hex, + ) def test_create_duplicate_role_name_fails(self): role_id = uuid.uuid4().hex @@ -42,10 +46,12 @@ class RoleTests(object): PROVIDERS.role_api.create_role(role_id, role) new_role_id = uuid.uuid4().hex role['id'] = new_role_id - self.assertRaises(exception.Conflict, - PROVIDERS.role_api.create_role, - new_role_id, - role) + self.assertRaises( + exception.Conflict, + PROVIDERS.role_api.create_role, + new_role_id, + role, + ) def test_rename_duplicate_role_name_fails(self): role_id1 = uuid.uuid4().hex @@ -55,10 +61,9 @@ class RoleTests(object): PROVIDERS.role_api.create_role(role_id1, role1) PROVIDERS.role_api.create_role(role_id2, role2) role1['name'] = 'fake2name' - self.assertRaises(exception.Conflict, - PROVIDERS.role_api.update_role, - role_id1, - role1) + self.assertRaises( + exception.Conflict, PROVIDERS.role_api.update_role, role_id1, role1 + ) def test_role_crud(self): role = unit.new_role_ref() @@ -79,16 +84,16 @@ class RoleTests(object): self.assertDictEqual(role_ref_dict, updated_role_ref) PROVIDERS.role_api.delete_role(role['id']) - 
self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.get_role, - role['id']) + self.assertRaises( + exception.RoleNotFound, PROVIDERS.role_api.get_role, role['id'] + ) def test_role_crud_without_description(self): role = { 'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, 'domain_id': None, - 'options': {} + 'options': {}, } self.role_api.create_role(role['id'], role) role_ref = self.role_api.get_role(role['id']) @@ -105,16 +110,18 @@ class RoleTests(object): self.assertDictEqual(role_ref_dict, updated_role_ref) self.role_api.delete_role(role['id']) - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role['id']) + self.assertRaises( + exception.RoleNotFound, self.role_api.get_role, role['id'] + ) def test_update_role_returns_not_found(self): role = unit.new_role_ref() - self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.update_role, - role['id'], - role) + self.assertRaises( + exception.RoleNotFound, + PROVIDERS.role_api.update_role, + role['id'], + role, + ) def test_list_roles(self): roles = PROVIDERS.role_api.list_roles() @@ -139,8 +146,9 @@ class RoleTests(object): # Invalidate Cache PROVIDERS.role_api.get_role.invalidate(PROVIDERS.role_api, role_id) # Verify get_role returns the new role_ref - self.assertDictEqual(updated_role_ref, - PROVIDERS.role_api.get_role(role_id)) + self.assertDictEqual( + updated_role_ref, PROVIDERS.role_api.get_role(role_id) + ) # Update role back to original via the assignment api manager PROVIDERS.role_api.update_role(role_id, role_ref) # Verify get_role returns the original role ref @@ -152,18 +160,18 @@ class RoleTests(object): # Invalidate cache PROVIDERS.role_api.get_role.invalidate(PROVIDERS.role_api, role_id) # Verify RoleNotFound is now raised - self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.get_role, - role_id) + self.assertRaises( + exception.RoleNotFound, PROVIDERS.role_api.get_role, role_id + ) # recreate role PROVIDERS.role_api.create_role(role_id, role) 
PROVIDERS.role_api.get_role(role_id) # delete role via the assignment api manager PROVIDERS.role_api.delete_role(role_id) # verity RoleNotFound is now raised - self.assertRaises(exception.RoleNotFound, - PROVIDERS.role_api.get_role, - role_id) + self.assertRaises( + exception.RoleNotFound, PROVIDERS.role_api.get_role, role_id + ) def test_create_role_immutable(self): role = unit.new_role_ref() @@ -174,9 +182,11 @@ class RoleTests(object): self.assertTrue('options' in role_created) self.assertTrue('options' in role_via_manager) self.assertTrue( - role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) self.assertTrue( - role_created['options'][ro_opt.IMMUTABLE_OPT.option_name]) + role_created['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) def test_cannot_update_immutable_role(self): role = unit.new_role_ref() @@ -184,10 +194,12 @@ class RoleTests(object): role['options'][ro_opt.IMMUTABLE_OPT.option_name] = True PROVIDERS.role_api.create_role(role_id, role) update_role = {'name': uuid.uuid4().hex} - self.assertRaises(exception.ResourceUpdateForbidden, - PROVIDERS.role_api.update_role, - role_id, - update_role) + self.assertRaises( + exception.ResourceUpdateForbidden, + PROVIDERS.role_api.update_role, + role_id, + update_role, + ) def test_cannot_update_immutable_role_while_unsetting_immutable(self): role = unit.new_role_ref() @@ -196,47 +208,50 @@ class RoleTests(object): PROVIDERS.role_api.create_role(role_id, role) update_role = { 'name': uuid.uuid4().hex, - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - } + 'options': {ro_opt.IMMUTABLE_OPT.option_name: True}, } - self.assertRaises(exception.ResourceUpdateForbidden, - PROVIDERS.role_api.update_role, - role_id, - update_role) + self.assertRaises( + exception.ResourceUpdateForbidden, + PROVIDERS.role_api.update_role, + role_id, + update_role, + ) def test_cannot_delete_immutable_role(self): role = unit.new_role_ref() role_id = 
role['id'] role['options'][ro_opt.IMMUTABLE_OPT.option_name] = True PROVIDERS.role_api.create_role(role_id, role) - self.assertRaises(exception.ResourceDeleteForbidden, - PROVIDERS.role_api.delete_role, - role_id) + self.assertRaises( + exception.ResourceDeleteForbidden, + PROVIDERS.role_api.delete_role, + role_id, + ) def test_update_role_set_immutable(self): role = unit.new_role_ref() role_id = role['id'] PROVIDERS.role_api.create_role(role_id, role) - update_role = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - } - } + update_role = {'options': {ro_opt.IMMUTABLE_OPT.option_name: True}} role_via_manager = PROVIDERS.role_api.get_role(role_id) self.assertTrue('options' in role_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options'] + ) role_update = PROVIDERS.role_api.update_role(role_id, update_role) role_via_manager = PROVIDERS.role_api.get_role(role_id) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in role_update['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_update['options'] + ) self.assertTrue( - role_update['options'][ro_opt.IMMUTABLE_OPT.option_name]) + role_update['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options'] + ) self.assertTrue( - role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) def test_update_role_set_immutable_with_additional_updates(self): role = unit.new_role_ref() @@ -244,26 +259,29 @@ class RoleTests(object): PROVIDERS.role_api.create_role(role_id, role) update_role = { 'name': uuid.uuid4().hex, - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - } + 'options': {ro_opt.IMMUTABLE_OPT.option_name: True}, } role_via_manager = PROVIDERS.role_api.get_role(role_id) 
self.assertTrue('options' in role_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options'] + ) role_update = PROVIDERS.role_api.update_role(role_id, update_role) role_via_manager = PROVIDERS.role_api.get_role(role_id) self.assertEqual(role_update['name'], update_role['name']) self.assertEqual(role_via_manager['name'], update_role['name']) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in role_update['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_update['options'] + ) self.assertTrue( - role_update['options'][ro_opt.IMMUTABLE_OPT.option_name]) + role_update['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options'] + ) self.assertTrue( - role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) def test_update_role_unset_immutable(self): role = unit.new_role_ref() @@ -273,29 +291,26 @@ class RoleTests(object): role_via_manager = PROVIDERS.role_api.get_role(role_id) self.assertTrue('options' in role_via_manager) self.assertTrue( - role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) - update_role = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: False - } - } + role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) + update_role = {'options': {ro_opt.IMMUTABLE_OPT.option_name: False}} PROVIDERS.role_api.update_role(role_id, update_role) role_via_manager = PROVIDERS.role_api.get_role(role_id) self.assertTrue('options' in role_via_manager) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options'] + ) self.assertFalse( - role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) - update_role = { - 'options': { - 
ro_opt.IMMUTABLE_OPT.option_name: None - } - } + role_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) + update_role = {'options': {ro_opt.IMMUTABLE_OPT.option_name: None}} role_updated = PROVIDERS.role_api.update_role(role_id, update_role) role_via_manager = PROVIDERS.role_api.get_role(role_id) self.assertTrue('options' in role_updated) self.assertTrue('options' in role_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in role_updated['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_updated['options'] + ) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in role_via_manager['options'] + ) diff --git a/keystone/tests/unit/auth/plugins/test_mapped.py b/keystone/tests/unit/auth/plugins/test_mapped.py index a79bdaa6c7..6b943da6bb 100644 --- a/keystone/tests/unit/auth/plugins/test_mapped.py +++ b/keystone/tests/unit/auth/plugins/test_mapped.py @@ -34,117 +34,178 @@ class TestMappedPlugin(unit.TestCase): self.member_role_id = uuid.uuid4().hex self.member_role_name = "member" self.existing_roles = { - self.member_role_name: {'id': self.member_role_id}} - self.shadow_project_mock = {'name': "test-project", - 'roles': [{'name': self.member_role_name}]} + self.member_role_name: {'id': self.member_role_id} + } + self.shadow_project_mock = { + 'name': "test-project", + 'roles': [{'name': self.member_role_name}], + } self.shadow_project_in_domain_mock = { - 'name': "test-project-in-domain", 'domain': self.domain_mock, - 'roles': [{'name': self.member_role_name}]} - self.shadow_projects_mock = [self.shadow_project_mock, - self.shadow_project_in_domain_mock] - self.user_mock = {'id': uuid.uuid4().hex, - 'name': "test-user"} + 'name': "test-project-in-domain", + 'domain': self.domain_mock, + 'roles': [{'name': self.member_role_name}], + } + self.shadow_projects_mock = [ + self.shadow_project_mock, + self.shadow_project_in_domain_mock, + ] + self.user_mock = {'id': 
uuid.uuid4().hex, 'name': "test-user"} def test_configure_project_domain_no_project_domain(self): - mapped.configure_project_domain(self.shadow_project_mock, - self.idp_domain_uuid_mock, - self.resource_api_mock) + mapped.configure_project_domain( + self.shadow_project_mock, + self.idp_domain_uuid_mock, + self.resource_api_mock, + ) self.assertIn("domain", self.shadow_project_mock) - self.assertEqual(self.idp_domain_uuid_mock, - self.shadow_project_mock['domain']['id']) + self.assertEqual( + self.idp_domain_uuid_mock, self.shadow_project_mock['domain']['id'] + ) def test_configure_project_domain_with_domain_id(self): self.shadow_project_mock['domain'] = self.domain_mock - mapped.configure_project_domain(self.shadow_project_mock, - self.idp_domain_uuid_mock, - self.resource_api_mock) + mapped.configure_project_domain( + self.shadow_project_mock, + self.idp_domain_uuid_mock, + self.resource_api_mock, + ) self.assertIn("domain", self.shadow_project_mock) - self.assertEqual(self.domain_uuid_mock, - self.shadow_project_mock['domain']['id']) + self.assertEqual( + self.domain_uuid_mock, self.shadow_project_mock['domain']['id'] + ) def test_configure_project_domain_with_domain_name(self): domain_name = "test-domain" self.shadow_project_mock['domain'] = {'name': domain_name} - self.resource_api_mock.get_domain_by_name.return_value =\ + self.resource_api_mock.get_domain_by_name.return_value = ( self.domain_mock - mapped.configure_project_domain(self.shadow_project_mock, - self.idp_domain_uuid_mock, - self.resource_api_mock) + ) + mapped.configure_project_domain( + self.shadow_project_mock, + self.idp_domain_uuid_mock, + self.resource_api_mock, + ) self.assertIn("domain", self.shadow_project_mock) - self.assertEqual(self.domain_uuid_mock, - self.shadow_project_mock['domain']['id']) + self.assertEqual( + self.domain_uuid_mock, self.shadow_project_mock['domain']['id'] + ) self.resource_api_mock.get_domain_by_name.assert_called_with( - domain_name) + domain_name + ) def 
test_handle_projects_from_mapping_project_exists(self): project_mock_1 = self.create_project_mock_for_shadow_project( - self.shadow_project_mock) + self.shadow_project_mock + ) project_mock_2 = self.create_project_mock_for_shadow_project( - self.shadow_project_in_domain_mock) + self.shadow_project_in_domain_mock + ) self.resource_api_mock.get_project_by_name.side_effect = [ - project_mock_1, project_mock_2] - mapped.handle_projects_from_mapping(self.shadow_projects_mock, - self.idp_domain_uuid_mock, - self.existing_roles, - self.user_mock, - self.assignment_api_mock, - self.resource_api_mock - ) - self.resource_api_mock.get_project_by_name.assert_has_calls([ - mock.call(self.shadow_project_in_domain_mock['name'], - self.shadow_project_in_domain_mock['domain']['id']), - mock.call(self.shadow_project_mock['name'], - self.idp_domain_uuid_mock)], any_order=True) - self.assignment_api_mock.create_grant.assert_has_calls([ - mock.call(self.member_role_id, user_id=self.user_mock['id'], - project_id=project_mock_1['id']), - mock.call(self.member_role_id, user_id=self.user_mock['id'], - project_id=project_mock_2['id']) - ]) + project_mock_1, + project_mock_2, + ] + mapped.handle_projects_from_mapping( + self.shadow_projects_mock, + self.idp_domain_uuid_mock, + self.existing_roles, + self.user_mock, + self.assignment_api_mock, + self.resource_api_mock, + ) + self.resource_api_mock.get_project_by_name.assert_has_calls( + [ + mock.call( + self.shadow_project_in_domain_mock['name'], + self.shadow_project_in_domain_mock['domain']['id'], + ), + mock.call( + self.shadow_project_mock['name'], self.idp_domain_uuid_mock + ), + ], + any_order=True, + ) + self.assignment_api_mock.create_grant.assert_has_calls( + [ + mock.call( + self.member_role_id, + user_id=self.user_mock['id'], + project_id=project_mock_1['id'], + ), + mock.call( + self.member_role_id, + user_id=self.user_mock['id'], + project_id=project_mock_2['id'], + ), + ] + ) @mock.patch("uuid.UUID.hex", 
new_callable=mock.PropertyMock) def test_handle_projects_from_mapping_create_projects(self, uuid_mock): uuid_mock.return_value = "uuid" project_mock_1 = self.create_project_mock_for_shadow_project( - self.shadow_project_mock) + self.shadow_project_mock + ) project_mock_2 = self.create_project_mock_for_shadow_project( - self.shadow_project_in_domain_mock) + self.shadow_project_in_domain_mock + ) self.resource_api_mock.get_project_by_name.side_effect = [ ProjectNotFound(project_id=project_mock_1['name']), - ProjectNotFound(project_id=project_mock_2['name'])] + ProjectNotFound(project_id=project_mock_2['name']), + ] self.resource_api_mock.create_project.side_effect = [ - project_mock_1, project_mock_2] - mapped.handle_projects_from_mapping(self.shadow_projects_mock, - self.idp_domain_uuid_mock, - self.existing_roles, - self.user_mock, - self.assignment_api_mock, - self.resource_api_mock - ) - self.resource_api_mock.get_project_by_name.assert_has_calls([ - mock.call(self.shadow_project_in_domain_mock['name'], - self.shadow_project_in_domain_mock['domain']['id']), - mock.call(self.shadow_project_mock['name'], - self.idp_domain_uuid_mock)], any_order=True) + project_mock_1, + project_mock_2, + ] + mapped.handle_projects_from_mapping( + self.shadow_projects_mock, + self.idp_domain_uuid_mock, + self.existing_roles, + self.user_mock, + self.assignment_api_mock, + self.resource_api_mock, + ) + self.resource_api_mock.get_project_by_name.assert_has_calls( + [ + mock.call( + self.shadow_project_in_domain_mock['name'], + self.shadow_project_in_domain_mock['domain']['id'], + ), + mock.call( + self.shadow_project_mock['name'], self.idp_domain_uuid_mock + ), + ], + any_order=True, + ) expected_project_ref1 = { 'id': "uuid", 'name': self.shadow_project_mock['name'], - 'domain_id': self.idp_domain_uuid_mock + 'domain_id': self.idp_domain_uuid_mock, } expected_project_ref2 = { 'id': "uuid", 'name': self.shadow_project_in_domain_mock['name'], - 'domain_id': 
self.shadow_project_in_domain_mock['domain']['id'] + 'domain_id': self.shadow_project_in_domain_mock['domain']['id'], } - self.resource_api_mock.create_project.assert_has_calls([ - mock.call(expected_project_ref1['id'], expected_project_ref1), - mock.call(expected_project_ref2['id'], expected_project_ref2)]) - self.assignment_api_mock.create_grant.assert_has_calls([ - mock.call(self.member_role_id, user_id=self.user_mock['id'], - project_id=project_mock_1['id']), - mock.call(self.member_role_id, user_id=self.user_mock['id'], - project_id=project_mock_2['id']) - ]) + self.resource_api_mock.create_project.assert_has_calls( + [ + mock.call(expected_project_ref1['id'], expected_project_ref1), + mock.call(expected_project_ref2['id'], expected_project_ref2), + ] + ) + self.assignment_api_mock.create_grant.assert_has_calls( + [ + mock.call( + self.member_role_id, + user_id=self.user_mock['id'], + project_id=project_mock_1['id'], + ), + mock.call( + self.member_role_id, + user_id=self.user_mock['id'], + project_id=project_mock_2['id'], + ), + ] + ) def create_project_mock_for_shadow_project(self, shadow_project): project = shadow_project.copy() diff --git a/keystone/tests/unit/auth/test_controllers.py b/keystone/tests/unit/auth/test_controllers.py index 2c74962fd7..c26e0f1c71 100644 --- a/keystone/tests/unit/auth/test_controllers.py +++ b/keystone/tests/unit/auth/test_controllers.py @@ -37,21 +37,28 @@ class TestLoadAuthMethod(unit.BaseTestCase): # Setup stevedore.DriverManager to return a driver for the plugin extension_ = extension.Extension( - plugin_name, entry_point=mock.sentinel.entry_point, + plugin_name, + entry_point=mock.sentinel.entry_point, plugin=mock.sentinel.plugin, - obj=mock.sentinel.driver) + obj=mock.sentinel.driver, + ) auth_plugin_namespace = 'keystone.auth.%s' % method fake_driver_manager = stevedore.DriverManager.make_test_instance( - extension_, namespace=auth_plugin_namespace) + extension_, namespace=auth_plugin_namespace + ) - driver_manager_mock 
= self.useFixture(fixtures.MockPatchObject( - stevedore, 'DriverManager', return_value=fake_driver_manager)).mock + driver_manager_mock = self.useFixture( + fixtures.MockPatchObject( + stevedore, 'DriverManager', return_value=fake_driver_manager + ) + ).mock driver = core.load_auth_method(method) self.assertEqual(auth_plugin_namespace, fake_driver_manager.namespace) driver_manager_mock.assert_called_once_with( - auth_plugin_namespace, plugin_name, invoke_on_load=True) + auth_plugin_namespace, plugin_name, invoke_on_load=True + ) self.assertIs(mock.sentinel.driver, driver) def test_entrypoint_fails(self): @@ -65,7 +72,10 @@ class TestLoadAuthMethod(unit.BaseTestCase): # stevedore.DriverManager raises RuntimeError if it can't load the # driver. - self.useFixture(fixtures.MockPatchObject( - stevedore, 'DriverManager', side_effect=RuntimeError)) + self.useFixture( + fixtures.MockPatchObject( + stevedore, 'DriverManager', side_effect=RuntimeError + ) + ) self.assertRaises(RuntimeError, core.load_auth_method, method) diff --git a/keystone/tests/unit/auth/test_schema.py b/keystone/tests/unit/auth/test_schema.py index 520619c9cb..d9880ccf5b 100644 --- a/keystone/tests/unit/auth/test_schema.py +++ b/keystone/tests/unit/auth/test_schema.py @@ -20,7 +20,9 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def _expect_failure(self, post_data): self.assertRaises( exception.SchemaValidationError, - schema.validate_issue_token_auth, post_data) + schema.validate_issue_token_auth, + post_data, + ) def test_auth_not_object_ex(self): self._expect_failure('something') @@ -166,9 +168,7 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): 'password': { 'user': { 'id': 'something', - 'domain': { - 'name': {} - }, + 'domain': {'name': {}}, }, }, }, @@ -183,9 +183,7 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): 'password': { 'user': { 'id': 'something', - 'domain': { - 'id': {} - }, + 'domain': {'id': {}}, }, }, }, @@ -238,14 +236,18 @@ class 
TestValidateIssueTokenAuth(unit.BaseTestCase): def test_scope_not_object_or_string_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': 1, } self._expect_failure(p) def test_project_not_object_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'project': 'something', }, @@ -254,7 +256,9 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_project_name_not_string_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'project': { 'name': {}, @@ -265,7 +269,9 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_project_id_not_string_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'project': { 'id': {}, @@ -276,7 +282,9 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_project_no_id_or_name_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'project': {}, }, @@ -285,7 +293,9 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_project_domain_not_object_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'project': { 'id': 'something', @@ -297,11 +307,15 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_project_domain_name_not_string_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'project': { 'id': 'something', - 'domain': {'name': {}, }, + 'domain': { + 'name': {}, + }, }, }, } @@ -309,11 +323,15 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_project_domain_id_not_string_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'project': { 'id': 'something', - 'domain': {'id': {}, }, + 'domain': { + 'id': {}, + }, }, }, } @@ -321,7 +339,9 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def 
test_project_domain_no_id_or_name_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'project': { 'id': 'something', @@ -333,7 +353,9 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_domain_not_object_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'domain': 'something', }, @@ -342,25 +364,35 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_domain_id_not_string_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { - 'domain': {'id': {}, }, + 'domain': { + 'id': {}, + }, }, } self._expect_failure(p) def test_domain_name_not_string_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { - 'domain': {'name': {}, }, + 'domain': { + 'name': {}, + }, }, } self._expect_failure(p) def test_domain_no_id_or_name_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'domain': {}, }, @@ -369,7 +401,9 @@ class TestValidateIssueTokenAuth(unit.BaseTestCase): def test_trust_not_object_ex(self): p = { - 'identity': {'methods': [], }, + 'identity': { + 'methods': [], + }, 'scope': { 'OS-TRUST:trust': 'something', }, diff --git a/keystone/tests/unit/backend/core_ldap.py b/keystone/tests/unit/backend/core_ldap.py index 36779f9f80..85e6d0c135 100644 --- a/keystone/tests/unit/backend/core_ldap.py +++ b/keystone/tests/unit/backend/core_ldap.py @@ -31,8 +31,9 @@ def create_group_container(identity_api): group_api = identity_api.driver.group conn = group_api.get_connection() dn = 'ou=Groups,cn=example,cn=com' - conn.add_s(dn, [('objectclass', ['organizationalUnit']), - ('ou', ['Groups'])]) + conn.add_s( + dn, [('objectclass', ['organizationalUnit']), ('ou', ['Groups'])] + ) class BaseBackendLdapCommon(object): @@ -65,17 +66,15 @@ class BaseBackendLdapCommon(object): return config_files def get_user_enabled_vals(self, user): - user_dn = 
( - PROVIDERS.identity_api.driver.user._id_to_dn_string( - user['id'] - ) + user_dn = PROVIDERS.identity_api.driver.user._id_to_dn_string( + user['id'] ) enabled_attr_name = CONF.ldap.user_enabled_attribute ldap_ = PROVIDERS.identity_api.driver.user.get_connection() - res = ldap_.search_s(user_dn, - ldap.SCOPE_BASE, - u'(sn=%s)' % user['name']) + res = ldap_.search_s( + user_dn, ldap.SCOPE_BASE, u'(sn=%s)' % user['name'] + ) if enabled_attr_name in res[0][1]: return res[0][1][enabled_attr_name] else: @@ -102,8 +101,9 @@ class BaseBackendLdapIdentitySqlEverythingElse(unit.SQLDriverOverrides): """Mixin base for Identity LDAP, everything else SQL backend tests.""" def config_files(self): - config_files = super(BaseBackendLdapIdentitySqlEverythingElse, - self).config_files() + config_files = super( + BaseBackendLdapIdentitySqlEverythingElse, self + ).config_files() config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf')) return config_files @@ -119,8 +119,9 @@ class BaseBackendLdapIdentitySqlEverythingElse(unit.SQLDriverOverrides): self.user_foo['enabled'] = True def config_overrides(self): - super(BaseBackendLdapIdentitySqlEverythingElse, - self).config_overrides() + super( + BaseBackendLdapIdentitySqlEverythingElse, self + ).config_overrides() self.config_fixture.config(group='identity', driver='ldap') self.config_fixture.config(group='resource', driver='sql') self.config_fixture.config(group='assignment', driver='sql') @@ -137,7 +138,9 @@ class BaseBackendLdapIdentitySqlEverythingElseWithMapping(object): """ def config_overrides(self): - super(BaseBackendLdapIdentitySqlEverythingElseWithMapping, - self).config_overrides() - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) + super( + BaseBackendLdapIdentitySqlEverythingElseWithMapping, self + ).config_overrides() + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=False + ) diff --git a/keystone/tests/unit/catalog/test_backends.py 
b/keystone/tests/unit/catalog/test_backends.py index 513e5c3c3e..726e74e5f2 100644 --- a/keystone/tests/unit/catalog/test_backends.py +++ b/keystone/tests/unit/catalog/test_backends.py @@ -71,16 +71,22 @@ class CatalogTests(object): # delete PROVIDERS.catalog_api.delete_region(parent_region_id) - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.delete_region, - parent_region_id) - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.get_region, - parent_region_id) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.delete_region, + parent_region_id, + ) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + parent_region_id, + ) # Ensure the child is also gone... - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.get_region, - region_id) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + region_id, + ) def _create_region_with_parent_id(self, parent_id=None): new_region = unit.new_region_ref(parent_region_id=parent_id) @@ -113,27 +119,30 @@ class CatalogTests(object): PROVIDERS.catalog_api.driver.update_region(region_id, updated_region) self.assertLessEqual( new_region.items(), - PROVIDERS.catalog_api.get_region(region_id).items() + PROVIDERS.catalog_api.get_region(region_id).items(), ) PROVIDERS.catalog_api.get_region.invalidate( PROVIDERS.catalog_api, region_id ) self.assertLessEqual( updated_region.items(), - PROVIDERS.catalog_api.get_region(region_id).items() + PROVIDERS.catalog_api.get_region(region_id).items(), ) # delete the region PROVIDERS.catalog_api.driver.delete_region(region_id) # still get the old region self.assertLessEqual( updated_region.items(), - PROVIDERS.catalog_api.get_region(region_id).items() + PROVIDERS.catalog_api.get_region(region_id).items(), ) PROVIDERS.catalog_api.get_region.invalidate( PROVIDERS.catalog_api, region_id ) - self.assertRaises(exception.RegionNotFound, - 
PROVIDERS.catalog_api.get_region, region_id) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + region_id, + ) @unit.skip_if_cache_disabled('catalog') def test_invalidate_cache_when_updating_region(self): @@ -150,8 +159,9 @@ class CatalogTests(object): # assert that we can get the new region current_region = PROVIDERS.catalog_api.get_region(region_id) - self.assertEqual(new_description['description'], - current_region['description']) + self.assertEqual( + new_description['description'], current_region['description'] + ) def test_update_region_extras(self): new_region = unit.new_region_ref() @@ -159,64 +169,75 @@ class CatalogTests(object): PROVIDERS.catalog_api.create_region(new_region) email = 'keystone@openstack.org' - new_ref = {'description': uuid.uuid4().hex, - 'email': email} + new_ref = {'description': uuid.uuid4().hex, 'email': email} PROVIDERS.catalog_api.update_region(region_id, new_ref) current_region = PROVIDERS.catalog_api.get_region(region_id) - self.assertEqual(email, - current_region['email']) + self.assertEqual(email, current_region['email']) def test_create_region_with_duplicate_id(self): new_region = unit.new_region_ref() PROVIDERS.catalog_api.create_region(new_region) # Create region again with duplicate id - self.assertRaises(exception.Conflict, - PROVIDERS.catalog_api.create_region, - new_region) + self.assertRaises( + exception.Conflict, PROVIDERS.catalog_api.create_region, new_region + ) def test_get_region_returns_not_found(self): - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.get_region, - uuid.uuid4().hex) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + uuid.uuid4().hex, + ) def test_delete_region_returns_not_found(self): - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.delete_region, - uuid.uuid4().hex) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.delete_region, + uuid.uuid4().hex, + ) def 
test_create_region_invalid_parent_region_returns_not_found(self): new_region = unit.new_region_ref(parent_region_id=uuid.uuid4().hex) - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.create_region, - new_region) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.create_region, + new_region, + ) def test_avoid_creating_circular_references_in_regions_update(self): region_one = self._create_region_with_parent_id() # self circle: region_one->region_one - self.assertRaises(exception.CircularRegionHierarchyError, - PROVIDERS.catalog_api.update_region, - region_one['id'], - {'parent_region_id': region_one['id']}) + self.assertRaises( + exception.CircularRegionHierarchyError, + PROVIDERS.catalog_api.update_region, + region_one['id'], + {'parent_region_id': region_one['id']}, + ) # region_one->region_two->region_one region_two = self._create_region_with_parent_id(region_one['id']) - self.assertRaises(exception.CircularRegionHierarchyError, - PROVIDERS.catalog_api.update_region, - region_one['id'], - {'parent_region_id': region_two['id']}) + self.assertRaises( + exception.CircularRegionHierarchyError, + PROVIDERS.catalog_api.update_region, + region_one['id'], + {'parent_region_id': region_two['id']}, + ) # region_one region_two->region_three->region_four->region_two region_three = self._create_region_with_parent_id(region_two['id']) region_four = self._create_region_with_parent_id(region_three['id']) - self.assertRaises(exception.CircularRegionHierarchyError, - PROVIDERS.catalog_api.update_region, - region_two['id'], - {'parent_region_id': region_four['id']}) + self.assertRaises( + exception.CircularRegionHierarchyError, + PROVIDERS.catalog_api.update_region, + region_two['id'], + {'parent_region_id': region_four['id']}, + ) - @mock.patch.object(base.CatalogDriverBase, - "_ensure_no_circle_in_hierarchical_regions") + @mock.patch.object( + base.CatalogDriverBase, "_ensure_no_circle_in_hierarchical_regions" + ) def 
test_circular_regions_can_be_deleted(self, mock_ensure_on_circle): # turn off the enforcement so that cycles can be created for the test mock_ensure_on_circle.return_value = None @@ -225,44 +246,56 @@ class CatalogTests(object): # self circle: region_one->region_one PROVIDERS.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_one['id']}) + region_one['id'], {'parent_region_id': region_one['id']} + ) PROVIDERS.catalog_api.delete_region(region_one['id']) - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.get_region, - region_one['id']) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + region_one['id'], + ) # region_one->region_two->region_one region_one = self._create_region_with_parent_id() region_two = self._create_region_with_parent_id(region_one['id']) PROVIDERS.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_two['id']}) + region_one['id'], {'parent_region_id': region_two['id']} + ) PROVIDERS.catalog_api.delete_region(region_one['id']) - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.get_region, - region_one['id']) - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.get_region, - region_two['id']) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + region_one['id'], + ) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + region_two['id'], + ) # region_one->region_two->region_three->region_one region_one = self._create_region_with_parent_id() region_two = self._create_region_with_parent_id(region_one['id']) region_three = self._create_region_with_parent_id(region_two['id']) PROVIDERS.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_three['id']}) + region_one['id'], {'parent_region_id': region_three['id']} + ) PROVIDERS.catalog_api.delete_region(region_two['id']) - self.assertRaises(exception.RegionNotFound, - 
PROVIDERS.catalog_api.get_region, - region_two['id']) - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.get_region, - region_one['id']) - self.assertRaises(exception.RegionNotFound, - PROVIDERS.catalog_api.get_region, - region_three['id']) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + region_two['id'], + ) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + region_one['id'], + ) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.get_region, + region_three['id'], + ) def test_service_crud(self): # create @@ -286,12 +319,16 @@ class CatalogTests(object): # delete PROVIDERS.catalog_api.delete_service(service_id) - self.assertRaises(exception.ServiceNotFound, - PROVIDERS.catalog_api.delete_service, - service_id) - self.assertRaises(exception.ServiceNotFound, - PROVIDERS.catalog_api.get_service, - service_id) + self.assertRaises( + exception.ServiceNotFound, + PROVIDERS.catalog_api.delete_service, + service_id, + ) + self.assertRaises( + exception.ServiceNotFound, + PROVIDERS.catalog_api.get_service, + service_id, + ) def _create_random_service(self): new_service = unit.new_service_ref() @@ -347,31 +384,35 @@ class CatalogTests(object): ) self.assertLessEqual( new_service.items(), - PROVIDERS.catalog_api.get_service(service_id).items() + PROVIDERS.catalog_api.get_service(service_id).items(), ) PROVIDERS.catalog_api.get_service.invalidate( PROVIDERS.catalog_api, service_id ) self.assertLessEqual( updated_service.items(), - PROVIDERS.catalog_api.get_service(service_id).items() + PROVIDERS.catalog_api.get_service(service_id).items(), ) # delete bypassing catalog api PROVIDERS.catalog_api.driver.delete_service(service_id) self.assertLessEqual( updated_service.items(), - PROVIDERS.catalog_api.get_service(service_id).items() + PROVIDERS.catalog_api.get_service(service_id).items(), ) PROVIDERS.catalog_api.get_service.invalidate( PROVIDERS.catalog_api, 
service_id ) - self.assertRaises(exception.ServiceNotFound, - PROVIDERS.catalog_api.delete_service, - service_id) - self.assertRaises(exception.ServiceNotFound, - PROVIDERS.catalog_api.get_service, - service_id) + self.assertRaises( + exception.ServiceNotFound, + PROVIDERS.catalog_api.delete_service, + service_id, + ) + self.assertRaises( + exception.ServiceNotFound, + PROVIDERS.catalog_api.get_service, + service_id, + ) @unit.skip_if_cache_disabled('catalog') def test_invalidate_cache_when_updating_service(self): @@ -396,26 +437,32 @@ class CatalogTests(object): PROVIDERS.catalog_api.create_service(service['id'], service) # create an endpoint attached to the service - endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) + endpoint = unit.new_endpoint_ref( + service_id=service['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) # deleting the service should also delete the endpoint PROVIDERS.catalog_api.delete_service(service['id']) - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.delete_endpoint, - endpoint['id']) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.get_endpoint, + endpoint['id'], + ) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.delete_endpoint, + endpoint['id'], + ) def test_cache_layer_delete_service_with_endpoint(self): service = unit.new_service_ref() PROVIDERS.catalog_api.create_service(service['id'], service) # create an endpoint attached to the service - endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) + endpoint = unit.new_endpoint_ref( + service_id=service['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) # cache the result PROVIDERS.catalog_api.get_service(service['id']) @@ -424,108 +471,144 @@ class CatalogTests(object): 
PROVIDERS.catalog_api.driver.delete_service(service['id']) self.assertLessEqual( endpoint.items(), - PROVIDERS.catalog_api.get_endpoint(endpoint['id']).items()) + PROVIDERS.catalog_api.get_endpoint(endpoint['id']).items(), + ) self.assertLessEqual( service.items(), - PROVIDERS.catalog_api.get_service(service['id']).items()) + PROVIDERS.catalog_api.get_service(service['id']).items(), + ) PROVIDERS.catalog_api.get_endpoint.invalidate( PROVIDERS.catalog_api, endpoint['id'] ) - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.delete_endpoint, - endpoint['id']) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.get_endpoint, + endpoint['id'], + ) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.delete_endpoint, + endpoint['id'], + ) # multiple endpoints associated with a service - second_endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) + second_endpoint = unit.new_endpoint_ref( + service_id=service['id'], region_id=None + ) PROVIDERS.catalog_api.create_service(service['id'], service) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) PROVIDERS.catalog_api.create_endpoint( second_endpoint['id'], second_endpoint ) PROVIDERS.catalog_api.delete_service(service['id']) - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.delete_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.get_endpoint, - second_endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.delete_endpoint, - second_endpoint['id']) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.get_endpoint, + endpoint['id'], + ) + self.assertRaises( + exception.EndpointNotFound, + 
PROVIDERS.catalog_api.delete_endpoint, + endpoint['id'], + ) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.get_endpoint, + second_endpoint['id'], + ) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.delete_endpoint, + second_endpoint['id'], + ) def test_get_service_returns_not_found(self): - self.assertRaises(exception.ServiceNotFound, - PROVIDERS.catalog_api.get_service, - uuid.uuid4().hex) + self.assertRaises( + exception.ServiceNotFound, + PROVIDERS.catalog_api.get_service, + uuid.uuid4().hex, + ) def test_delete_service_returns_not_found(self): - self.assertRaises(exception.ServiceNotFound, - PROVIDERS.catalog_api.delete_service, - uuid.uuid4().hex) + self.assertRaises( + exception.ServiceNotFound, + PROVIDERS.catalog_api.delete_service, + uuid.uuid4().hex, + ) def test_create_endpoint_nonexistent_service(self): - endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex, - region_id=None) - self.assertRaises(exception.ValidationError, - PROVIDERS.catalog_api.create_endpoint, - endpoint['id'], - endpoint) + endpoint = unit.new_endpoint_ref( + service_id=uuid.uuid4().hex, region_id=None + ) + self.assertRaises( + exception.ValidationError, + PROVIDERS.catalog_api.create_endpoint, + endpoint['id'], + endpoint, + ) def test_update_endpoint_nonexistent_service(self): dummy_service, enabled_endpoint, dummy_disabled_endpoint = ( - self._create_endpoints()) + self._create_endpoints() + ) new_endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex) - self.assertRaises(exception.ValidationError, - PROVIDERS.catalog_api.update_endpoint, - enabled_endpoint['id'], - new_endpoint) + self.assertRaises( + exception.ValidationError, + PROVIDERS.catalog_api.update_endpoint, + enabled_endpoint['id'], + new_endpoint, + ) def test_create_endpoint_nonexistent_region(self): service = unit.new_service_ref() PROVIDERS.catalog_api.create_service(service['id'], service) endpoint = 
unit.new_endpoint_ref(service_id=service['id']) - self.assertRaises(exception.ValidationError, - PROVIDERS.catalog_api.create_endpoint, - endpoint['id'], - endpoint) + self.assertRaises( + exception.ValidationError, + PROVIDERS.catalog_api.create_endpoint, + endpoint['id'], + endpoint, + ) def test_update_endpoint_nonexistent_region(self): dummy_service, enabled_endpoint, dummy_disabled_endpoint = ( - self._create_endpoints()) + self._create_endpoints() + ) new_endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex) - self.assertRaises(exception.ValidationError, - PROVIDERS.catalog_api.update_endpoint, - enabled_endpoint['id'], - new_endpoint) + self.assertRaises( + exception.ValidationError, + PROVIDERS.catalog_api.update_endpoint, + enabled_endpoint['id'], + new_endpoint, + ) def test_get_endpoint_returns_not_found(self): - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.get_endpoint, - uuid.uuid4().hex) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.get_endpoint, + uuid.uuid4().hex, + ) def test_delete_endpoint_returns_not_found(self): - self.assertRaises(exception.EndpointNotFound, - PROVIDERS.catalog_api.delete_endpoint, - uuid.uuid4().hex) + self.assertRaises( + exception.EndpointNotFound, + PROVIDERS.catalog_api.delete_endpoint, + uuid.uuid4().hex, + ) def test_create_endpoint(self): service = unit.new_service_ref() PROVIDERS.catalog_api.create_service(service['id'], service) - endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) + endpoint = unit.new_endpoint_ref( + service_id=service['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint.copy()) def test_update_endpoint(self): dummy_service_ref, endpoint_ref, dummy_disabled_endpoint_ref = ( - self._create_endpoints()) + self._create_endpoints() + ) res = PROVIDERS.catalog_api.update_endpoint( endpoint_ref['id'], {'interface': 'private'} ) @@ -548,7 +631,8 @@ class CatalogTests(object): 
service_id=service_id, region_id=region, url='http://localhost/%s' % uuid.uuid4().hex, - **kwargs) + **kwargs + ) PROVIDERS.catalog_api.create_endpoint(ref['id'], ref) return ref @@ -564,7 +648,8 @@ class CatalogTests(object): # Create endpoints enabled_endpoint_ref = create_endpoint(service_id, region['id']) disabled_endpoint_ref = create_endpoint( - service_id, region['id'], enabled=False, interface='internal') + service_id, region['id'], enabled=False, interface='internal' + ) return service_ref, enabled_endpoint_ref, disabled_endpoint_ref @@ -574,9 +659,9 @@ class CatalogTests(object): expected_ids = set([uuid.uuid4().hex for _ in range(3)]) for endpoint_id in expected_ids: - endpoint = unit.new_endpoint_ref(service_id=service['id'], - id=endpoint_id, - region_id=None) + endpoint = unit.new_endpoint_ref( + service_id=service['id'], id=endpoint_id, region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) endpoints = PROVIDERS.catalog_api.list_endpoints() @@ -603,8 +688,9 @@ class CatalogTests(object): PROVIDERS.catalog_api.create_service(service['id'], service) # create an endpoint attached to the service - endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) + endpoint = unit.new_endpoint_ref( + service_id=service['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) # cache the endpoint diff --git a/keystone/tests/unit/catalog/test_core.py b/keystone/tests/unit/catalog/test_core.py index 7b74961012..766cab13a5 100644 --- a/keystone/tests/unit/catalog/test_core.py +++ b/keystone/tests/unit/catalog/test_core.py @@ -20,8 +20,9 @@ from keystone.tests import unit class FormatUrlTests(unit.BaseTestCase): def test_successful_formatting(self): - url_template = ('http://server:9090/' - '$(tenant_id)s/$(user_id)s/$(project_id)s') + url_template = ( + 'http://server:9090/' '$(tenant_id)s/$(user_id)s/$(project_id)s' + ) project_id = uuid.uuid4().hex values = {'tenant_id': 'A', 'user_id': 
'B', 'project_id': project_id} actual_url = utils.format_url(url_template, values) @@ -30,29 +31,34 @@ class FormatUrlTests(unit.BaseTestCase): self.assertEqual(expected_url, actual_url) def test_raises_malformed_on_missing_key(self): - self.assertRaises(exception.MalformedEndpoint, - utils.format_url, - "http://server:9090/$(tenant_id)s", - {}) + self.assertRaises( + exception.MalformedEndpoint, + utils.format_url, + "http://server:9090/$(tenant_id)s", + {}, + ) def test_raises_malformed_on_wrong_type(self): - self.assertRaises(exception.MalformedEndpoint, - utils.format_url, - "http://server:9090/$(tenant_id)d", - {"tenant_id": 'A'}) + self.assertRaises( + exception.MalformedEndpoint, + utils.format_url, + "http://server:9090/$(tenant_id)d", + {"tenant_id": 'A'}, + ) def test_raises_malformed_on_incomplete_format(self): - self.assertRaises(exception.MalformedEndpoint, - utils.format_url, - "http://server:9090/$(tenant_id)", - {"tenant_id": 'A'}) + self.assertRaises( + exception.MalformedEndpoint, + utils.format_url, + "http://server:9090/$(tenant_id)", + {"tenant_id": 'A'}, + ) def test_formatting_a_non_string(self): def _test(url_template): - self.assertRaises(exception.MalformedEndpoint, - utils.format_url, - url_template, - {}) + self.assertRaises( + exception.MalformedEndpoint, utils.format_url, url_template, {} + ) _test(None) _test(object()) @@ -61,13 +67,13 @@ class FormatUrlTests(unit.BaseTestCase): # If the url template contains a substitution that's not in the allowed # list then MalformedEndpoint is raised. # For example, admin_token isn't allowed. 
- url_template = ('http://server:9090/' - '$(project_id)s/$(user_id)s/$(admin_token)s') + url_template = ( + 'http://server:9090/' '$(project_id)s/$(user_id)s/$(admin_token)s' + ) values = {'user_id': 'B', 'admin_token': 'C'} - self.assertRaises(exception.MalformedEndpoint, - utils.format_url, - url_template, - values) + self.assertRaises( + exception.MalformedEndpoint, utils.format_url, url_template, values + ) def test_substitution_with_allowed_tenant_keyerror(self): # No value of 'tenant_id' is passed into url_template. @@ -76,11 +82,13 @@ class FormatUrlTests(unit.BaseTestCase): # This is intentional behavior since we don't want to skip # all the later endpoints once there is an URL of endpoint # trying to replace 'tenant_id' with None. - url_template = ('http://server:9090/' - '$(tenant_id)s/$(user_id)s') + url_template = 'http://server:9090/' '$(tenant_id)s/$(user_id)s' values = {'user_id': 'B'} - self.assertIsNone(utils.format_url(url_template, values, - silent_keyerror_failures=['tenant_id'])) + self.assertIsNone( + utils.format_url( + url_template, values, silent_keyerror_failures=['tenant_id'] + ) + ) def test_substitution_with_allowed_project_keyerror(self): # No value of 'project_id' is passed into url_template. @@ -89,8 +97,10 @@ class FormatUrlTests(unit.BaseTestCase): # This is intentional behavior since we don't want to skip # all the later endpoints once there is an URL of endpoint # trying to replace 'project_id' with None. 
- url_template = ('http://server:9090/' - '$(project_id)s/$(user_id)s') + url_template = 'http://server:9090/' '$(project_id)s/$(user_id)s' values = {'user_id': 'B'} - self.assertIsNone(utils.format_url(url_template, values, - silent_keyerror_failures=['project_id'])) + self.assertIsNone( + utils.format_url( + url_template, values, silent_keyerror_failures=['project_id'] + ) + ) diff --git a/keystone/tests/unit/common/sql/test_upgrades.py b/keystone/tests/unit/common/sql/test_upgrades.py index e56b0dfa0b..dbf0156855 100644 --- a/keystone/tests/unit/common/sql/test_upgrades.py +++ b/keystone/tests/unit/common/sql/test_upgrades.py @@ -84,8 +84,9 @@ class KeystoneModelsMigrationsSync(test_migrations.ModelsMigrationsSync): # Override keystone's context manager to be oslo.db's global context # manager. sql.core._TESTING_USE_GLOBAL_CONTEXT_MANAGER = True - self.addCleanup(setattr, - sql.core, '_TESTING_USE_GLOBAL_CONTEXT_MANAGER', False) + self.addCleanup( + setattr, sql.core, '_TESTING_USE_GLOBAL_CONTEXT_MANAGER', False + ) self.addCleanup(sql.cleanup) def db_sync(self, engine): diff --git a/keystone/tests/unit/common/test_cache.py b/keystone/tests/unit/common/test_cache.py index d51049924a..49cf7587b8 100644 --- a/keystone/tests/unit/common/test_cache.py +++ b/keystone/tests/unit/common/test_cache.py @@ -33,13 +33,15 @@ class TestCacheRegion(unit.BaseTestCase): # TODO(morganfainberg): Make Cache Testing a separate test case # in tempest, and move it out of the base unit tests. 
group='cache', - backend='dogpile.cache.memory') + backend='dogpile.cache.memory', + ) # replace existing backend since this may already be configured cache.CACHE_INVALIDATION_REGION.configure( backend='dogpile.cache.memory', expiration_time=None, - replace_existing_backend=True) + replace_existing_backend=True, + ) self.region_name = uuid.uuid4().hex self.region0 = cache.create_region('test_region') @@ -181,7 +183,8 @@ class TestCacheRegion(unit.BaseTestCase): the tests above can erroneosly pass that we need this sanity check. """ region_key = cache.RegionInvalidationManager( - None, self.region0.name)._region_key + None, self.region0.name + )._region_key key = uuid.uuid4().hex value = uuid.uuid4().hex diff --git a/keystone/tests/unit/common/test_database_conflicts.py b/keystone/tests/unit/common/test_database_conflicts.py index 50e1927425..93b722920d 100644 --- a/keystone/tests/unit/common/test_database_conflicts.py +++ b/keystone/tests/unit/common/test_database_conflicts.py @@ -63,8 +63,9 @@ class DuplicateTestCase(test_v3.RestfulTestCase): try: PROVIDERS.identity_api.create_user(user) except exception.Conflict as e: - self.assertIn("Duplicate entry found with name %s" % user['name'], - repr(e)) + self.assertIn( + "Duplicate entry found with name %s" % user['name'], repr(e) + ) else: self.fail("Create duplicate user did not raise a conflict") @@ -75,8 +76,9 @@ class DuplicateTestCase(test_v3.RestfulTestCase): try: PROVIDERS.role_api.create_role(role['id'], role) except exception.Conflict as e: - self.assertIn("Duplicate entry found with name %s" % role['name'], - repr(e)) + self.assertIn( + "Duplicate entry found with name %s" % role['name'], repr(e) + ) else: self.fail("Create duplicate role did not raise a conflict") @@ -86,8 +88,9 @@ class DuplicateTestCase(test_v3.RestfulTestCase): try: PROVIDERS.identity_api.create_group(group) except exception.Conflict as e: - self.assertIn("Duplicate entry found with name %s" - % group['name'], repr(e)) + self.assertIn( + 
"Duplicate entry found with name %s" % group['name'], repr(e) + ) else: self.fail("Create duplicate group did not raise a conflict") @@ -97,8 +100,10 @@ class DuplicateTestCase(test_v3.RestfulTestCase): try: PROVIDERS.policy_api.create_policy(policy_ref['id'], policy_ref) except exception.Conflict as e: - self.assertIn("Duplicate entry found with name %s" - % policy_ref['name'], repr(e)) + self.assertIn( + "Duplicate entry found with name %s" % policy_ref['name'], + repr(e), + ) else: self.fail("Create duplicate policy did not raise a conflict") @@ -113,8 +118,9 @@ class DuplicateTestCase(test_v3.RestfulTestCase): credential['id'], credential ) except exception.Conflict as e: - self.assertIn("Duplicate entry found with ID %s" - % credential['id'], repr(e)) + self.assertIn( + "Duplicate entry found with ID %s" % credential['id'], repr(e) + ) else: self.fail("Create duplicate credential did not raise a conflict") @@ -134,8 +140,9 @@ class DuplicateTestCase(test_v3.RestfulTestCase): trust_ref['id'], trust_ref, [role_ref] ) except exception.Conflict as e: - self.assertIn("Duplicate entry found with ID %s" - % trust_ref['id'], repr(e)) + self.assertIn( + "Duplicate entry found with ID %s" % trust_ref['id'], repr(e) + ) else: self.fail("Create duplicate trust did not raise a conflict") @@ -150,8 +157,10 @@ class DuplicateTestCase(test_v3.RestfulTestCase): self.mapping['id'], self.mapping ) except exception.Conflict as e: - self.assertIn("Duplicate entry found with ID %s" - % self.mapping['id'], repr(e)) + self.assertIn( + "Duplicate entry found with ID %s" % self.mapping['id'], + repr(e), + ) else: self.fail("Create duplicate mapping did not raise a conflict") @@ -166,8 +175,10 @@ class DuplicateTestCase(test_v3.RestfulTestCase): self.mapping['id'], self.mapping ) except exception.Conflict as e: - self.assertIn("Duplicate entry found with ID %s" - % self.mapping['id'], repr(e)) + self.assertIn( + "Duplicate entry found with ID %s" % self.mapping['id'], + repr(e), + ) # 
Any other exception will cause the test to fail def test_region_duplicate_conflict_gives_name(self): @@ -184,7 +195,7 @@ class DuplicateTestCase(test_v3.RestfulTestCase): self.idp = { 'id': uuid.uuid4().hex, 'enabled': True, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } PROVIDERS.federation_api.create_idp(self.idp['id'], self.idp) self.mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER @@ -192,10 +203,7 @@ class DuplicateTestCase(test_v3.RestfulTestCase): PROVIDERS.federation_api.create_mapping( self.mapping['id'], self.mapping ) - protocol = { - 'id': uuid.uuid4().hex, - 'mapping_id': self.mapping['id'] - } + protocol = {'id': uuid.uuid4().hex, 'mapping_id': self.mapping['id']} protocol_ret = PROVIDERS.federation_api.create_protocol( self.idp['id'], protocol['id'], protocol ) @@ -204,17 +212,21 @@ class DuplicateTestCase(test_v3.RestfulTestCase): self.idp['id'], protocol['id'], protocol ) except exception.Conflict as e: - self.assertIn("Duplicate entry found with ID %s" - % protocol_ret['id'], repr(e)) + self.assertIn( + "Duplicate entry found with ID %s" % protocol_ret['id'], + repr(e), + ) else: - self.fail("Create duplicate federation_protocol did not raise " - "a conflict") + self.fail( + "Create duplicate federation_protocol did not raise " + "a conflict" + ) def test_federation_protocol_duplicate_conflict_with_id_in_id(self): self.idp = { 'id': uuid.uuid4().hex, 'enabled': True, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } PROVIDERS.federation_api.create_idp(self.idp['id'], self.idp) self.mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER @@ -224,7 +236,7 @@ class DuplicateTestCase(test_v3.RestfulTestCase): ) protocol = { 'id': 'federation_protocol_with_id_in_the_id', - 'mapping_id': self.mapping['id'] + 'mapping_id': self.mapping['id'], } protocol_ret = PROVIDERS.federation_api.create_protocol( self.idp['id'], protocol['id'], protocol @@ -234,15 +246,17 @@ class DuplicateTestCase(test_v3.RestfulTestCase): 
self.idp['id'], protocol['id'], protocol ) except exception.Conflict as e: - self.assertIn("Duplicate entry found with ID %s" - % protocol_ret['id'], repr(e)) + self.assertIn( + "Duplicate entry found with ID %s" % protocol_ret['id'], + repr(e), + ) # Any other exception will fail the test def test_federation_protocol_duplicate_conflict_with_id_in_idp_id(self): self.idp = { 'id': 'myidp', 'enabled': True, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } PROVIDERS.federation_api.create_idp(self.idp['id'], self.idp) self.mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER @@ -250,10 +264,7 @@ class DuplicateTestCase(test_v3.RestfulTestCase): PROVIDERS.federation_api.create_mapping( self.mapping['id'], self.mapping ) - protocol = { - 'id': uuid.uuid4().hex, - 'mapping_id': self.mapping['id'] - } + protocol = {'id': uuid.uuid4().hex, 'mapping_id': self.mapping['id']} protocol_ret = PROVIDERS.federation_api.create_protocol( self.idp['id'], protocol['id'], protocol ) @@ -262,8 +273,10 @@ class DuplicateTestCase(test_v3.RestfulTestCase): self.idp['id'], protocol['id'], protocol ) except exception.Conflict as e: - self.assertIn("Duplicate entry found with ID %s" - % protocol_ret['id'], repr(e)) + self.assertIn( + "Duplicate entry found with ID %s" % protocol_ret['id'], + repr(e), + ) # Any other exception will fail the test def test_sp_duplicate_conflict_gives_name(self): @@ -278,7 +291,8 @@ class DuplicateTestCase(test_v3.RestfulTestCase): try: PROVIDERS.federation_api.create_sp('SP1', sp) except exception.Conflict as e: - self.assertIn("Duplicate entry found with ID %s" - % service_ref['id'], repr(e)) + self.assertIn( + "Duplicate entry found with ID %s" % service_ref['id'], repr(e) + ) else: self.fail("Create duplicate sp did not raise a conflict") diff --git a/keystone/tests/unit/common/test_json_home.py b/keystone/tests/unit/common/test_json_home.py index dd8af5a307..edbe20499d 100644 --- a/keystone/tests/unit/common/test_json_home.py +++ 
b/keystone/tests/unit/common/test_json_home.py @@ -26,8 +26,9 @@ class JsonHomeTest(unit.BaseTestCase): resource_name = self.getUniqueString() relation = json_home.build_v3_resource_relation(resource_name) exp_relation = ( - 'https://docs.openstack.org/api/openstack-identity/3/rel/%s' % - resource_name) + 'https://docs.openstack.org/api/openstack-identity/3/rel/%s' + % resource_name + ) self.assertThat(relation, matchers.Equals(exp_relation)) def test_build_v3_extension_resource_relation(self): @@ -35,18 +36,21 @@ class JsonHomeTest(unit.BaseTestCase): extension_version = self.getUniqueString() resource_name = self.getUniqueString() relation = json_home.build_v3_extension_resource_relation( - extension_name, extension_version, resource_name) + extension_name, extension_version, resource_name + ) exp_relation = ( 'https://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/rel' - '/%s' % (extension_name, extension_version, resource_name)) + '/%s' % (extension_name, extension_version, resource_name) + ) self.assertThat(relation, matchers.Equals(exp_relation)) def test_build_v3_parameter_relation(self): parameter_name = self.getUniqueString() relation = json_home.build_v3_parameter_relation(parameter_name) exp_relation = ( - 'https://docs.openstack.org/api/openstack-identity/3/param/%s' % - parameter_name) + 'https://docs.openstack.org/api/openstack-identity/3/param/%s' + % parameter_name + ) self.assertThat(relation, matchers.Equals(exp_relation)) def test_build_v3_extension_parameter_relation(self): @@ -54,10 +58,12 @@ class JsonHomeTest(unit.BaseTestCase): extension_version = self.getUniqueString() parameter_name = self.getUniqueString() relation = json_home.build_v3_extension_parameter_relation( - extension_name, extension_version, parameter_name) + extension_name, extension_version, parameter_name + ) exp_relation = ( 'https://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/' - 'param/%s' % (extension_name, extension_version, parameter_name)) + 'param/%s' 
% (extension_name, extension_version, parameter_name) + ) self.assertThat(relation, matchers.Equals(exp_relation)) def test_translate_urls(self): @@ -71,7 +77,8 @@ class JsonHomeTest(unit.BaseTestCase): href_rel: {'href': href}, href_template_rel: { 'href-template': href_template, - 'href-vars': href_vars} + 'href-vars': href_vars, + }, } } @@ -84,7 +91,8 @@ class JsonHomeTest(unit.BaseTestCase): href_rel: {'href': new_prefix + href}, href_template_rel: { 'href-template': new_prefix + href_template, - 'href-vars': href_vars} + 'href-vars': href_vars, + }, } } diff --git a/keystone/tests/unit/common/test_notifications.py b/keystone/tests/unit/common/test_notifications.py index 8b6d6df984..d4c7b1fbb3 100644 --- a/keystone/tests/unit/common/test_notifications.py +++ b/keystone/tests/unit/common/test_notifications.py @@ -51,8 +51,9 @@ class ArbitraryException(Exception): def register_callback(operation, resource_type=EXP_RESOURCE_TYPE): """Helper for creating and registering a mock callback.""" - callback = mock.Mock(__name__='callback', - im_class=mock.Mock(__name__='class')) + callback = mock.Mock( + __name__='callback', im_class=mock.Mock(__name__='class') + ) notifications.register_event_callback(operation, resource_type, callback) return callback @@ -63,58 +64,80 @@ class AuditNotificationsTestCase(unit.BaseTestCase): self.config_fixture = self.useFixture(config_fixture.Config(CONF)) self.addCleanup(notifications.clear_subscribers) - def _test_notification_operation_with_basic_format(self, - notify_function, - operation): + def _test_notification_operation_with_basic_format( + self, notify_function, operation + ): self.config_fixture.config(notification_format='basic') exp_resource_id = uuid.uuid4().hex callback = register_callback(operation) notify_function(EXP_RESOURCE_TYPE, exp_resource_id) - callback.assert_called_once_with('identity', EXP_RESOURCE_TYPE, - operation, - {'resource_info': exp_resource_id}) + callback.assert_called_once_with( + 'identity', + 
EXP_RESOURCE_TYPE, + operation, + {'resource_info': exp_resource_id}, + ) - def _test_notification_operation_with_cadf_format(self, - notify_function, - operation): + def _test_notification_operation_with_cadf_format( + self, notify_function, operation + ): self.config_fixture.config(notification_format='cadf') exp_resource_id = uuid.uuid4().hex with mock.patch( - 'keystone.notifications._create_cadf_payload') as cadf_notify: + 'keystone.notifications._create_cadf_payload' + ) as cadf_notify: notify_function(EXP_RESOURCE_TYPE, exp_resource_id) initiator = None reason = None cadf_notify.assert_called_once_with( - operation, EXP_RESOURCE_TYPE, exp_resource_id, - notifications.taxonomy.OUTCOME_SUCCESS, initiator, reason) + operation, + EXP_RESOURCE_TYPE, + exp_resource_id, + notifications.taxonomy.OUTCOME_SUCCESS, + initiator, + reason, + ) notify_function(EXP_RESOURCE_TYPE, exp_resource_id, public=False) cadf_notify.assert_called_once_with( - operation, EXP_RESOURCE_TYPE, exp_resource_id, - notifications.taxonomy.OUTCOME_SUCCESS, initiator, reason) + operation, + EXP_RESOURCE_TYPE, + exp_resource_id, + notifications.taxonomy.OUTCOME_SUCCESS, + initiator, + reason, + ) def test_resource_created_notification(self): self._test_notification_operation_with_basic_format( - notifications.Audit.created, CREATED_OPERATION) + notifications.Audit.created, CREATED_OPERATION + ) self._test_notification_operation_with_cadf_format( - notifications.Audit.created, CREATED_OPERATION) + notifications.Audit.created, CREATED_OPERATION + ) def test_resource_updated_notification(self): self._test_notification_operation_with_basic_format( - notifications.Audit.updated, UPDATED_OPERATION) + notifications.Audit.updated, UPDATED_OPERATION + ) self._test_notification_operation_with_cadf_format( - notifications.Audit.updated, UPDATED_OPERATION) + notifications.Audit.updated, UPDATED_OPERATION + ) def test_resource_deleted_notification(self): self._test_notification_operation_with_basic_format( - 
notifications.Audit.deleted, DELETED_OPERATION) + notifications.Audit.deleted, DELETED_OPERATION + ) self._test_notification_operation_with_cadf_format( - notifications.Audit.deleted, DELETED_OPERATION) + notifications.Audit.deleted, DELETED_OPERATION + ) def test_resource_disabled_notification(self): self._test_notification_operation_with_basic_format( - notifications.Audit.disabled, DISABLED_OPERATION) + notifications.Audit.disabled, DISABLED_OPERATION + ) self._test_notification_operation_with_cadf_format( - notifications.Audit.disabled, DISABLED_OPERATION) + notifications.Audit.disabled, DISABLED_OPERATION + ) class NotificationsTestCase(unit.BaseTestCase): @@ -150,13 +173,15 @@ class NotificationsTestCase(unit.BaseTestCase): expected_args = [ {}, # empty context 'identity.%s.created' % resource_type, # event_type - {'resource_info': resource} # payload + {'resource_info': resource}, # payload ] - with mock.patch.object(notifications._get_notifier(), - 'info') as mocked: - notifications._send_notification(operation, resource_type, - resource) + with mock.patch.object( + notifications._get_notifier(), 'info' + ) as mocked: + notifications._send_notification( + operation, resource_type, resource + ) mocked.assert_called_once_with(*expected_args) def test_send_notification_with_opt_out(self): @@ -177,11 +202,13 @@ class NotificationsTestCase(unit.BaseTestCase): conf = self.useFixture(config_fixture.Config(CONF)) conf.config(notification_opt_out=[event_type]) - with mock.patch.object(notifications._get_notifier(), - 'info') as mocked: + with mock.patch.object( + notifications._get_notifier(), 'info' + ) as mocked: - notifications._send_notification(operation, resource_type, - resource) + notifications._send_notification( + operation, resource_type, resource + ) mocked.assert_not_called() def test_send_audit_notification_with_opt_out(self): @@ -201,14 +228,13 @@ class NotificationsTestCase(unit.BaseTestCase): conf = self.useFixture(config_fixture.Config(CONF)) 
conf.config(notification_opt_out=[event_type]) - with mock.patch.object(notifications._get_notifier(), - 'info') as mocked: + with mock.patch.object( + notifications._get_notifier(), 'info' + ) as mocked: - notifications._send_audit_notification(action, - initiator, - outcome, - target, - event_type) + notifications._send_audit_notification( + action, initiator, outcome, target, event_type + ) mocked.assert_not_called() def test_opt_out_authenticate_event(self): @@ -225,14 +251,13 @@ class NotificationsTestCase(unit.BaseTestCase): conf = self.useFixture(config_fixture.Config(CONF)) conf.config(notification_opt_out=[meter_name]) - with mock.patch.object(notifications._get_notifier(), - 'info') as mocked: + with mock.patch.object( + notifications._get_notifier(), 'info' + ) as mocked: - notifications._send_audit_notification(action, - initiator, - outcome, - target, - event_type) + notifications._send_audit_notification( + action, initiator, outcome, target, event_type + ) mocked.assert_not_called() @@ -244,26 +269,43 @@ class BaseNotificationTest(test_v3.RestfulTestCase): self._notifications = [] self._audits = [] - def fake_notify(operation, resource_type, resource_id, initiator=None, - actor_dict=None, public=True): + def fake_notify( + operation, + resource_type, + resource_id, + initiator=None, + actor_dict=None, + public=True, + ): note = { 'resource_id': resource_id, 'operation': operation, 'resource_type': resource_type, 'initiator': initiator, 'send_notification_called': True, - 'public': public} + 'public': public, + } if actor_dict: note['actor_id'] = actor_dict.get('id') note['actor_type'] = actor_dict.get('type') note['actor_operation'] = actor_dict.get('actor_operation') self._notifications.append(note) - self.useFixture(fixtures.MockPatchObject( - notifications, '_send_notification', fake_notify)) + self.useFixture( + fixtures.MockPatchObject( + notifications, '_send_notification', fake_notify + ) + ) - def fake_audit(action, initiator, outcome, 
target, - event_type, reason=None, **kwargs): + def fake_audit( + action, + initiator, + outcome, + target, + event_type, + reason=None, + **kwargs + ): service_security = cadftaxonomy.SERVICE_SECURITY event = eventfactory.EventFactory().new_event( @@ -273,7 +315,8 @@ class BaseNotificationTest(test_v3.RestfulTestCase): initiator=initiator, target=target, reason=reason, - observer=cadfresource.Resource(typeURI=service_security)) + observer=cadfresource.Resource(typeURI=service_security), + ) for key, value in kwargs.items(): setattr(event, key, value) @@ -283,15 +326,25 @@ class BaseNotificationTest(test_v3.RestfulTestCase): audit = { 'payload': payload, 'event_type': event_type, - 'send_notification_called': True} + 'send_notification_called': True, + } self._audits.append(audit) - self.useFixture(fixtures.MockPatchObject( - notifications, '_send_audit_notification', fake_audit)) + self.useFixture( + fixtures.MockPatchObject( + notifications, '_send_audit_notification', fake_audit + ) + ) - def _assert_last_note(self, resource_id, operation, resource_type, - actor_id=None, actor_type=None, - actor_operation=None): + def _assert_last_note( + self, + resource_id, + operation, + resource_type, + actor_id=None, + actor_type=None, + actor_operation=None, + ): # NOTE(stevemar): If 'basic' format is not used, then simply # return since this assertion is not valid. if CONF.notification_format != 'basic': @@ -307,8 +360,9 @@ class BaseNotificationTest(test_v3.RestfulTestCase): self.assertEqual(actor_type, note['actor_type']) self.assertEqual(actor_operation, note['actor_operation']) - def _assert_last_audit(self, resource_id, operation, resource_type, - target_uri, reason=None): + def _assert_last_audit( + self, resource_id, operation, resource_type, target_uri, reason=None + ): # NOTE(stevemar): If 'cadf' format is not used, then simply # return since this assertion is not valid. 
if CONF.notification_format != 'cadf': @@ -323,15 +377,17 @@ class BaseNotificationTest(test_v3.RestfulTestCase): self.assertEqual(target_uri, payload['target']['typeURI']) if resource_id: self.assertEqual(resource_id, payload['target']['id']) - event_type = '.'.join(filter(None, ['identity', - resource_type, - operation])) + event_type = '.'.join( + filter(None, ['identity', resource_type, operation]) + ) self.assertEqual(event_type, audit['event_type']) if reason: - self.assertEqual(reason['reasonCode'], - payload['reason']['reasonCode']) - self.assertEqual(reason['reasonType'], - payload['reason']['reasonType']) + self.assertEqual( + reason['reasonCode'], payload['reason']['reasonCode'] + ) + self.assertEqual( + reason['reasonType'], payload['reason']['reasonType'] + ) self.assertTrue(audit['send_notification_called']) def _assert_initiator_data_is_set(self, operation, resource_type, typeURI): @@ -345,25 +401,29 @@ class BaseNotificationTest(test_v3.RestfulTestCase): action = '%s.%s' % (operation, resource_type) self.assertEqual(action, payload['action']) - def _assert_notify_not_sent(self, resource_id, operation, resource_type, - public=True): + def _assert_notify_not_sent( + self, resource_id, operation, resource_type, public=True + ): unexpected = { 'resource_id': resource_id, 'operation': operation, 'resource_type': resource_type, 'send_notification_called': True, - 'public': public} + 'public': public, + } for note in self._notifications: self.assertNotEqual(unexpected, note) - def _assert_notify_sent(self, resource_id, operation, resource_type, - public=True): + def _assert_notify_sent( + self, resource_id, operation, resource_type, public=True + ): expected = { 'resource_id': resource_id, 'operation': operation, 'resource_type': resource_type, 'send_notification_called': True, - 'public': public} + 'public': public, + } for note in self._notifications: # compare only expected fields if all(note.get(k) == v for k, v in expected.items()): @@ -378,30 +438,45 
@@ class NotificationsForEntities(BaseNotificationTest): group_ref = unit.new_group_ref(domain_id=self.domain_id) group_ref = PROVIDERS.identity_api.create_group(group_ref) self._assert_last_note(group_ref['id'], CREATED_OPERATION, 'group') - self._assert_last_audit(group_ref['id'], CREATED_OPERATION, 'group', - cadftaxonomy.SECURITY_GROUP) + self._assert_last_audit( + group_ref['id'], + CREATED_OPERATION, + 'group', + cadftaxonomy.SECURITY_GROUP, + ) def test_create_project(self): project_ref = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) - self._assert_last_note( - project_ref['id'], CREATED_OPERATION, 'project') - self._assert_last_audit(project_ref['id'], CREATED_OPERATION, - 'project', cadftaxonomy.SECURITY_PROJECT) + self._assert_last_note(project_ref['id'], CREATED_OPERATION, 'project') + self._assert_last_audit( + project_ref['id'], + CREATED_OPERATION, + 'project', + cadftaxonomy.SECURITY_PROJECT, + ) def test_create_role(self): role_ref = unit.new_role_ref() PROVIDERS.role_api.create_role(role_ref['id'], role_ref) self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role') - self._assert_last_audit(role_ref['id'], CREATED_OPERATION, 'role', - cadftaxonomy.SECURITY_ROLE) + self._assert_last_audit( + role_ref['id'], + CREATED_OPERATION, + 'role', + cadftaxonomy.SECURITY_ROLE, + ) def test_create_user(self): user_ref = unit.new_user_ref(domain_id=self.domain_id) user_ref = PROVIDERS.identity_api.create_user(user_ref) self._assert_last_note(user_ref['id'], CREATED_OPERATION, 'user') - self._assert_last_audit(user_ref['id'], CREATED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER) + self._assert_last_audit( + user_ref['id'], + CREATED_OPERATION, + 'user', + cadftaxonomy.SECURITY_ACCOUNT_USER, + ) def test_create_trust(self): trustor = unit.new_user_ref(domain_id=self.domain_id) @@ -410,55 +485,78 @@ class NotificationsForEntities(BaseNotificationTest): trustee = 
PROVIDERS.identity_api.create_user(trustee) role_ref = unit.new_role_ref() PROVIDERS.role_api.create_role(role_ref['id'], role_ref) - trust_ref = unit.new_trust_ref(trustor['id'], - trustee['id']) + trust_ref = unit.new_trust_ref(trustor['id'], trustee['id']) PROVIDERS.trust_api.create_trust( trust_ref['id'], trust_ref, [role_ref] ) self._assert_last_note( - trust_ref['id'], CREATED_OPERATION, 'OS-TRUST:trust') - self._assert_last_audit(trust_ref['id'], CREATED_OPERATION, - 'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST) + trust_ref['id'], CREATED_OPERATION, 'OS-TRUST:trust' + ) + self._assert_last_audit( + trust_ref['id'], + CREATED_OPERATION, + 'OS-TRUST:trust', + cadftaxonomy.SECURITY_TRUST, + ) def test_delete_group(self): group_ref = unit.new_group_ref(domain_id=self.domain_id) group_ref = PROVIDERS.identity_api.create_group(group_ref) PROVIDERS.identity_api.delete_group(group_ref['id']) self._assert_last_note(group_ref['id'], DELETED_OPERATION, 'group') - self._assert_last_audit(group_ref['id'], DELETED_OPERATION, 'group', - cadftaxonomy.SECURITY_GROUP) + self._assert_last_audit( + group_ref['id'], + DELETED_OPERATION, + 'group', + cadftaxonomy.SECURITY_GROUP, + ) def test_delete_project(self): project_ref = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) PROVIDERS.resource_api.delete_project(project_ref['id']) - self._assert_last_note( - project_ref['id'], DELETED_OPERATION, 'project') - self._assert_last_audit(project_ref['id'], DELETED_OPERATION, - 'project', cadftaxonomy.SECURITY_PROJECT) + self._assert_last_note(project_ref['id'], DELETED_OPERATION, 'project') + self._assert_last_audit( + project_ref['id'], + DELETED_OPERATION, + 'project', + cadftaxonomy.SECURITY_PROJECT, + ) def test_delete_role(self): role_ref = unit.new_role_ref() PROVIDERS.role_api.create_role(role_ref['id'], role_ref) PROVIDERS.role_api.delete_role(role_ref['id']) self._assert_last_note(role_ref['id'], 
DELETED_OPERATION, 'role') - self._assert_last_audit(role_ref['id'], DELETED_OPERATION, 'role', - cadftaxonomy.SECURITY_ROLE) + self._assert_last_audit( + role_ref['id'], + DELETED_OPERATION, + 'role', + cadftaxonomy.SECURITY_ROLE, + ) def test_delete_user(self): user_ref = unit.new_user_ref(domain_id=self.domain_id) user_ref = PROVIDERS.identity_api.create_user(user_ref) PROVIDERS.identity_api.delete_user(user_ref['id']) self._assert_last_note(user_ref['id'], DELETED_OPERATION, 'user') - self._assert_last_audit(user_ref['id'], DELETED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER) + self._assert_last_audit( + user_ref['id'], + DELETED_OPERATION, + 'user', + cadftaxonomy.SECURITY_ACCOUNT_USER, + ) def test_create_domain(self): domain_ref = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain_ref['id'], domain_ref) self._assert_last_note(domain_ref['id'], CREATED_OPERATION, 'domain') - self._assert_last_audit(domain_ref['id'], CREATED_OPERATION, 'domain', - cadftaxonomy.SECURITY_DOMAIN) + self._assert_last_audit( + domain_ref['id'], + CREATED_OPERATION, + 'domain', + cadftaxonomy.SECURITY_DOMAIN, + ) def test_update_domain(self): domain_ref = unit.new_domain_ref() @@ -466,8 +564,12 @@ class NotificationsForEntities(BaseNotificationTest): domain_ref['description'] = uuid.uuid4().hex PROVIDERS.resource_api.update_domain(domain_ref['id'], domain_ref) self._assert_last_note(domain_ref['id'], UPDATED_OPERATION, 'domain') - self._assert_last_audit(domain_ref['id'], UPDATED_OPERATION, 'domain', - cadftaxonomy.SECURITY_DOMAIN) + self._assert_last_audit( + domain_ref['id'], + UPDATED_OPERATION, + 'domain', + cadftaxonomy.SECURITY_DOMAIN, + ) def test_delete_domain(self): domain_ref = unit.new_domain_ref() @@ -476,8 +578,12 @@ class NotificationsForEntities(BaseNotificationTest): PROVIDERS.resource_api.update_domain(domain_ref['id'], domain_ref) PROVIDERS.resource_api.delete_domain(domain_ref['id']) self._assert_last_note(domain_ref['id'], 
DELETED_OPERATION, 'domain') - self._assert_last_audit(domain_ref['id'], DELETED_OPERATION, 'domain', - cadftaxonomy.SECURITY_DOMAIN) + self._assert_last_audit( + domain_ref['id'], + DELETED_OPERATION, + 'domain', + cadftaxonomy.SECURITY_DOMAIN, + ) def test_delete_trust(self): trustor = unit.new_user_ref(domain_id=self.domain_id) @@ -491,127 +597,187 @@ class NotificationsForEntities(BaseNotificationTest): ) PROVIDERS.trust_api.delete_trust(trust_ref['id']) self._assert_last_note( - trust_ref['id'], DELETED_OPERATION, 'OS-TRUST:trust') - self._assert_last_audit(trust_ref['id'], DELETED_OPERATION, - 'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST) + trust_ref['id'], DELETED_OPERATION, 'OS-TRUST:trust' + ) + self._assert_last_audit( + trust_ref['id'], + DELETED_OPERATION, + 'OS-TRUST:trust', + cadftaxonomy.SECURITY_TRUST, + ) def test_create_endpoint(self): - endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) + endpoint_ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + ) PROVIDERS.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref) - self._assert_notify_sent(endpoint_ref['id'], CREATED_OPERATION, - 'endpoint') - self._assert_last_audit(endpoint_ref['id'], CREATED_OPERATION, - 'endpoint', cadftaxonomy.SECURITY_ENDPOINT) + self._assert_notify_sent( + endpoint_ref['id'], CREATED_OPERATION, 'endpoint' + ) + self._assert_last_audit( + endpoint_ref['id'], + CREATED_OPERATION, + 'endpoint', + cadftaxonomy.SECURITY_ENDPOINT, + ) def test_update_endpoint(self): - endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) + endpoint_ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + ) PROVIDERS.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref) PROVIDERS.catalog_api.update_endpoint(endpoint_ref['id'], endpoint_ref) - 
self._assert_notify_sent(endpoint_ref['id'], UPDATED_OPERATION, - 'endpoint') - self._assert_last_audit(endpoint_ref['id'], UPDATED_OPERATION, - 'endpoint', cadftaxonomy.SECURITY_ENDPOINT) + self._assert_notify_sent( + endpoint_ref['id'], UPDATED_OPERATION, 'endpoint' + ) + self._assert_last_audit( + endpoint_ref['id'], + UPDATED_OPERATION, + 'endpoint', + cadftaxonomy.SECURITY_ENDPOINT, + ) def test_delete_endpoint(self): - endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) + endpoint_ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + ) PROVIDERS.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref) PROVIDERS.catalog_api.delete_endpoint(endpoint_ref['id']) - self._assert_notify_sent(endpoint_ref['id'], DELETED_OPERATION, - 'endpoint') - self._assert_last_audit(endpoint_ref['id'], DELETED_OPERATION, - 'endpoint', cadftaxonomy.SECURITY_ENDPOINT) + self._assert_notify_sent( + endpoint_ref['id'], DELETED_OPERATION, 'endpoint' + ) + self._assert_last_audit( + endpoint_ref['id'], + DELETED_OPERATION, + 'endpoint', + cadftaxonomy.SECURITY_ENDPOINT, + ) def test_create_service(self): service_ref = unit.new_service_ref() PROVIDERS.catalog_api.create_service(service_ref['id'], service_ref) - self._assert_notify_sent(service_ref['id'], CREATED_OPERATION, - 'service') - self._assert_last_audit(service_ref['id'], CREATED_OPERATION, - 'service', cadftaxonomy.SECURITY_SERVICE) + self._assert_notify_sent( + service_ref['id'], CREATED_OPERATION, 'service' + ) + self._assert_last_audit( + service_ref['id'], + CREATED_OPERATION, + 'service', + cadftaxonomy.SECURITY_SERVICE, + ) def test_update_service(self): service_ref = unit.new_service_ref() PROVIDERS.catalog_api.create_service(service_ref['id'], service_ref) PROVIDERS.catalog_api.update_service(service_ref['id'], service_ref) - self._assert_notify_sent(service_ref['id'], UPDATED_OPERATION, - 
'service') - self._assert_last_audit(service_ref['id'], UPDATED_OPERATION, - 'service', cadftaxonomy.SECURITY_SERVICE) + self._assert_notify_sent( + service_ref['id'], UPDATED_OPERATION, 'service' + ) + self._assert_last_audit( + service_ref['id'], + UPDATED_OPERATION, + 'service', + cadftaxonomy.SECURITY_SERVICE, + ) def test_delete_service(self): service_ref = unit.new_service_ref() PROVIDERS.catalog_api.create_service(service_ref['id'], service_ref) PROVIDERS.catalog_api.delete_service(service_ref['id']) - self._assert_notify_sent(service_ref['id'], DELETED_OPERATION, - 'service') - self._assert_last_audit(service_ref['id'], DELETED_OPERATION, - 'service', cadftaxonomy.SECURITY_SERVICE) + self._assert_notify_sent( + service_ref['id'], DELETED_OPERATION, 'service' + ) + self._assert_last_audit( + service_ref['id'], + DELETED_OPERATION, + 'service', + cadftaxonomy.SECURITY_SERVICE, + ) def test_create_region(self): region_ref = unit.new_region_ref() PROVIDERS.catalog_api.create_region(region_ref) - self._assert_notify_sent(region_ref['id'], CREATED_OPERATION, - 'region') - self._assert_last_audit(region_ref['id'], CREATED_OPERATION, - 'region', cadftaxonomy.SECURITY_REGION) + self._assert_notify_sent(region_ref['id'], CREATED_OPERATION, 'region') + self._assert_last_audit( + region_ref['id'], + CREATED_OPERATION, + 'region', + cadftaxonomy.SECURITY_REGION, + ) def test_update_region(self): region_ref = unit.new_region_ref() PROVIDERS.catalog_api.create_region(region_ref) PROVIDERS.catalog_api.update_region(region_ref['id'], region_ref) - self._assert_notify_sent(region_ref['id'], UPDATED_OPERATION, - 'region') - self._assert_last_audit(region_ref['id'], UPDATED_OPERATION, - 'region', cadftaxonomy.SECURITY_REGION) + self._assert_notify_sent(region_ref['id'], UPDATED_OPERATION, 'region') + self._assert_last_audit( + region_ref['id'], + UPDATED_OPERATION, + 'region', + cadftaxonomy.SECURITY_REGION, + ) def test_delete_region(self): region_ref = unit.new_region_ref() 
PROVIDERS.catalog_api.create_region(region_ref) PROVIDERS.catalog_api.delete_region(region_ref['id']) - self._assert_notify_sent(region_ref['id'], DELETED_OPERATION, - 'region') - self._assert_last_audit(region_ref['id'], DELETED_OPERATION, - 'region', cadftaxonomy.SECURITY_REGION) + self._assert_notify_sent(region_ref['id'], DELETED_OPERATION, 'region') + self._assert_last_audit( + region_ref['id'], + DELETED_OPERATION, + 'region', + cadftaxonomy.SECURITY_REGION, + ) def test_create_policy(self): policy_ref = unit.new_policy_ref() PROVIDERS.policy_api.create_policy(policy_ref['id'], policy_ref) - self._assert_notify_sent(policy_ref['id'], CREATED_OPERATION, - 'policy') - self._assert_last_audit(policy_ref['id'], CREATED_OPERATION, - 'policy', cadftaxonomy.SECURITY_POLICY) + self._assert_notify_sent(policy_ref['id'], CREATED_OPERATION, 'policy') + self._assert_last_audit( + policy_ref['id'], + CREATED_OPERATION, + 'policy', + cadftaxonomy.SECURITY_POLICY, + ) def test_update_policy(self): policy_ref = unit.new_policy_ref() PROVIDERS.policy_api.create_policy(policy_ref['id'], policy_ref) PROVIDERS.policy_api.update_policy(policy_ref['id'], policy_ref) - self._assert_notify_sent(policy_ref['id'], UPDATED_OPERATION, - 'policy') - self._assert_last_audit(policy_ref['id'], UPDATED_OPERATION, - 'policy', cadftaxonomy.SECURITY_POLICY) + self._assert_notify_sent(policy_ref['id'], UPDATED_OPERATION, 'policy') + self._assert_last_audit( + policy_ref['id'], + UPDATED_OPERATION, + 'policy', + cadftaxonomy.SECURITY_POLICY, + ) def test_delete_policy(self): policy_ref = unit.new_policy_ref() PROVIDERS.policy_api.create_policy(policy_ref['id'], policy_ref) PROVIDERS.policy_api.delete_policy(policy_ref['id']) - self._assert_notify_sent(policy_ref['id'], DELETED_OPERATION, - 'policy') - self._assert_last_audit(policy_ref['id'], DELETED_OPERATION, - 'policy', cadftaxonomy.SECURITY_POLICY) + self._assert_notify_sent(policy_ref['id'], DELETED_OPERATION, 'policy') + 
self._assert_last_audit( + policy_ref['id'], + DELETED_OPERATION, + 'policy', + cadftaxonomy.SECURITY_POLICY, + ) def test_disable_domain(self): domain_ref = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain_ref['id'], domain_ref) domain_ref['enabled'] = False PROVIDERS.resource_api.update_domain(domain_ref['id'], domain_ref) - self._assert_notify_sent(domain_ref['id'], 'disabled', 'domain', - public=False) + self._assert_notify_sent( + domain_ref['id'], 'disabled', 'domain', public=False + ) def test_disable_of_disabled_domain_does_not_notify(self): domain_ref = unit.new_domain_ref(enabled=False) @@ -619,51 +785,63 @@ class NotificationsForEntities(BaseNotificationTest): # The domain_ref above is not changed during the create process. We # can use the same ref to perform the update. PROVIDERS.resource_api.update_domain(domain_ref['id'], domain_ref) - self._assert_notify_not_sent(domain_ref['id'], 'disabled', 'domain', - public=False) + self._assert_notify_not_sent( + domain_ref['id'], 'disabled', 'domain', public=False + ) def test_update_group(self): group_ref = unit.new_group_ref(domain_id=self.domain_id) group_ref = PROVIDERS.identity_api.create_group(group_ref) PROVIDERS.identity_api.update_group(group_ref['id'], group_ref) self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group') - self._assert_last_audit(group_ref['id'], UPDATED_OPERATION, 'group', - cadftaxonomy.SECURITY_GROUP) + self._assert_last_audit( + group_ref['id'], + UPDATED_OPERATION, + 'group', + cadftaxonomy.SECURITY_GROUP, + ) def test_update_project(self): project_ref = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) PROVIDERS.resource_api.update_project(project_ref['id'], project_ref) self._assert_notify_sent( - project_ref['id'], UPDATED_OPERATION, 'project', public=True) - self._assert_last_audit(project_ref['id'], UPDATED_OPERATION, - 'project', cadftaxonomy.SECURITY_PROJECT) + project_ref['id'], 
UPDATED_OPERATION, 'project', public=True + ) + self._assert_last_audit( + project_ref['id'], + UPDATED_OPERATION, + 'project', + cadftaxonomy.SECURITY_PROJECT, + ) def test_disable_project(self): project_ref = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) project_ref['enabled'] = False PROVIDERS.resource_api.update_project(project_ref['id'], project_ref) - self._assert_notify_sent(project_ref['id'], 'disabled', 'project', - public=False) + self._assert_notify_sent( + project_ref['id'], 'disabled', 'project', public=False + ) def test_disable_of_disabled_project_does_not_notify(self): - project_ref = unit.new_project_ref(domain_id=self.domain_id, - enabled=False) + project_ref = unit.new_project_ref( + domain_id=self.domain_id, enabled=False + ) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) # The project_ref above is not changed during the create process. We # can use the same ref to perform the update. 
PROVIDERS.resource_api.update_project(project_ref['id'], project_ref) - self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project', - public=False) + self._assert_notify_not_sent( + project_ref['id'], 'disabled', 'project', public=False + ) def test_update_project_does_not_send_disable(self): project_ref = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) project_ref['enabled'] = True PROVIDERS.resource_api.update_project(project_ref['id'], project_ref) - self._assert_last_note( - project_ref['id'], UPDATED_OPERATION, 'project') + self._assert_last_note(project_ref['id'], UPDATED_OPERATION, 'project') self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project') def test_update_role(self): @@ -671,16 +849,24 @@ class NotificationsForEntities(BaseNotificationTest): PROVIDERS.role_api.create_role(role_ref['id'], role_ref) PROVIDERS.role_api.update_role(role_ref['id'], role_ref) self._assert_last_note(role_ref['id'], UPDATED_OPERATION, 'role') - self._assert_last_audit(role_ref['id'], UPDATED_OPERATION, 'role', - cadftaxonomy.SECURITY_ROLE) + self._assert_last_audit( + role_ref['id'], + UPDATED_OPERATION, + 'role', + cadftaxonomy.SECURITY_ROLE, + ) def test_update_user(self): user_ref = unit.new_user_ref(domain_id=self.domain_id) user_ref = PROVIDERS.identity_api.create_user(user_ref) PROVIDERS.identity_api.update_user(user_ref['id'], user_ref) self._assert_last_note(user_ref['id'], UPDATED_OPERATION, 'user') - self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER) + self._assert_last_audit( + user_ref['id'], + UPDATED_OPERATION, + 'user', + cadftaxonomy.SECURITY_ACCOUNT_USER, + ) def test_config_option_no_events(self): self.config_fixture.config(notification_format='basic') @@ -700,9 +886,14 @@ class NotificationsForEntities(BaseNotificationTest): PROVIDERS.identity_api.add_user_to_group( user_ref['id'], group_ref['id'] ) - 
self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group', - actor_id=user_ref['id'], actor_type='user', - actor_operation='added') + self._assert_last_note( + group_ref['id'], + UPDATED_OPERATION, + 'group', + actor_id=user_ref['id'], + actor_type='user', + actor_operation='added', + ) def test_remove_user_from_group(self): user_ref = unit.new_user_ref(domain_id=self.domain_id) @@ -715,9 +906,14 @@ class NotificationsForEntities(BaseNotificationTest): PROVIDERS.identity_api.remove_user_from_group( user_ref['id'], group_ref['id'] ) - self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group', - actor_id=user_ref['id'], actor_type='user', - actor_operation='removed') + self._assert_last_note( + group_ref['id'], + UPDATED_OPERATION, + 'group', + actor_id=user_ref['id'], + actor_type='user', + actor_operation='removed', + ) def test_initiator_request_id(self): ref = unit.new_domain_ref() @@ -729,12 +925,14 @@ class NotificationsForEntities(BaseNotificationTest): def test_initiator_global_request_id(self): global_request_id = 'req-%s' % uuid.uuid4() ref = unit.new_domain_ref() - self.post('/domains', body={'domain': ref}, - headers={'X-OpenStack-Request-Id': global_request_id}) + self.post( + '/domains', + body={'domain': ref}, + headers={'X-OpenStack-Request-Id': global_request_id}, + ) note = self._notifications[-1] initiator = note['initiator'] - self.assertEqual( - initiator.global_request_id, global_request_id) + self.assertEqual(initiator.global_request_id, global_request_id) def test_initiator_global_request_id_not_set(self): ref = unit.new_domain_ref() @@ -750,25 +948,26 @@ class CADFNotificationsForPCIDSSEvents(BaseNotificationTest): super(CADFNotificationsForPCIDSSEvents, self).setUp() conf = self.useFixture(config_fixture.Config(CONF)) conf.config(notification_format='cadf') - conf.config(group='security_compliance', - password_expires_days=2) - conf.config(group='security_compliance', - lockout_failure_attempts=3) - 
conf.config(group='security_compliance', - unique_last_password_count=2) - conf.config(group='security_compliance', - minimum_password_age=2) - conf.config(group='security_compliance', - password_regex=r'^(?=.*\d)(?=.*[a-zA-Z]).{7,}$') - conf.config(group='security_compliance', - password_regex_description='1 letter, 1 digit, 7 chars') + conf.config(group='security_compliance', password_expires_days=2) + conf.config(group='security_compliance', lockout_failure_attempts=3) + conf.config(group='security_compliance', unique_last_password_count=2) + conf.config(group='security_compliance', minimum_password_age=2) + conf.config( + group='security_compliance', + password_regex=r'^(?=.*\d)(?=.*[a-zA-Z]).{7,}$', + ) + conf.config( + group='security_compliance', + password_regex_description='1 letter, 1 digit, 7 chars', + ) def test_password_expired_sends_notification(self): password = uuid.uuid4().hex password_creation_time = ( - datetime.datetime.utcnow() - - datetime.timedelta( - days=CONF.security_compliance.password_expires_days + 1) + datetime.datetime.utcnow() + - datetime.timedelta( + days=CONF.security_compliance.password_expires_days + 1 + ) ) freezer = freezegun.freeze_time(password_creation_time) @@ -778,137 +977,180 @@ class CADFNotificationsForPCIDSSEvents(BaseNotificationTest): # then stop the time machine and return to the present time, # where the user's password is now expired. 
freezer.start() - user_ref = unit.new_user_ref(domain_id=self.domain_id, - password=password) + user_ref = unit.new_user_ref( + domain_id=self.domain_id, password=password + ) user_ref = PROVIDERS.identity_api.create_user(user_ref) with self.make_request(): PROVIDERS.identity_api.authenticate(user_ref['id'], password) freezer.stop() - reason_type = (exception.PasswordExpired.message_format % - {'user_id': user_ref['id']}) - expected_reason = {'reasonCode': '401', - 'reasonType': reason_type} + reason_type = exception.PasswordExpired.message_format % { + 'user_id': user_ref['id'] + } + expected_reason = {'reasonCode': '401', 'reasonType': reason_type} with self.make_request(): - self.assertRaises(exception.PasswordExpired, - PROVIDERS.identity_api.authenticate, - user_id=user_ref['id'], - password=password) - self._assert_last_audit(None, 'authenticate', None, - cadftaxonomy.ACCOUNT_USER, - reason=expected_reason) + self.assertRaises( + exception.PasswordExpired, + PROVIDERS.identity_api.authenticate, + user_id=user_ref['id'], + password=password, + ) + self._assert_last_audit( + None, + 'authenticate', + None, + cadftaxonomy.ACCOUNT_USER, + reason=expected_reason, + ) def test_locked_out_user_sends_notification(self): password = uuid.uuid4().hex new_password = uuid.uuid4().hex - expected_responses = [AssertionError, AssertionError, AssertionError, - exception.Unauthorized] - user_ref = unit.new_user_ref(domain_id=self.domain_id, - password=password) + expected_responses = [ + AssertionError, + AssertionError, + AssertionError, + exception.Unauthorized, + ] + user_ref = unit.new_user_ref( + domain_id=self.domain_id, password=password + ) user_ref = PROVIDERS.identity_api.create_user(user_ref) - reason_type = (exception.AccountLocked.message_format % - {'user_id': user_ref['id']}) - expected_reason = {'reasonCode': '401', - 'reasonType': reason_type} + reason_type = exception.AccountLocked.message_format % { + 'user_id': user_ref['id'] + } + expected_reason = 
{'reasonCode': '401', 'reasonType': reason_type} for ex in expected_responses: with self.make_request(): - self.assertRaises(ex, - PROVIDERS.identity_api.change_password, - user_id=user_ref['id'], - original_password=new_password, - new_password=new_password) + self.assertRaises( + ex, + PROVIDERS.identity_api.change_password, + user_id=user_ref['id'], + original_password=new_password, + new_password=new_password, + ) - self._assert_last_audit(None, 'authenticate', None, - cadftaxonomy.ACCOUNT_USER, - reason=expected_reason) + self._assert_last_audit( + None, + 'authenticate', + None, + cadftaxonomy.ACCOUNT_USER, + reason=expected_reason, + ) def test_repeated_password_sends_notification(self): conf = self.useFixture(config_fixture.Config(CONF)) - conf.config(group='security_compliance', - minimum_password_age=0) + conf.config(group='security_compliance', minimum_password_age=0) password = uuid.uuid4().hex new_password = uuid.uuid4().hex count = CONF.security_compliance.unique_last_password_count - reason_type = (exception.PasswordHistoryValidationError.message_format - % {'unique_count': count}) - expected_reason = {'reasonCode': '400', - 'reasonType': reason_type} - user_ref = unit.new_user_ref(domain_id=self.domain_id, - password=password) + reason_type = ( + exception.PasswordHistoryValidationError.message_format + % {'unique_count': count} + ) + expected_reason = {'reasonCode': '400', 'reasonType': reason_type} + user_ref = unit.new_user_ref( + domain_id=self.domain_id, password=password + ) user_ref = PROVIDERS.identity_api.create_user(user_ref) with self.make_request(): PROVIDERS.identity_api.change_password( user_id=user_ref['id'], - original_password=password, new_password=new_password + original_password=password, + new_password=new_password, ) with self.make_request(): - self.assertRaises(exception.PasswordValidationError, - PROVIDERS.identity_api.change_password, - user_id=user_ref['id'], - original_password=new_password, - new_password=password) + 
self.assertRaises( + exception.PasswordValidationError, + PROVIDERS.identity_api.change_password, + user_id=user_ref['id'], + original_password=new_password, + new_password=password, + ) - self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER, - reason=expected_reason) + self._assert_last_audit( + user_ref['id'], + UPDATED_OPERATION, + 'user', + cadftaxonomy.SECURITY_ACCOUNT_USER, + reason=expected_reason, + ) def test_invalid_password_sends_notification(self): password = uuid.uuid4().hex invalid_password = '1' regex = CONF.security_compliance.password_regex_description - reason_type = (exception.PasswordRequirementsValidationError - .message_format % - {'detail': regex}) - expected_reason = {'reasonCode': '400', - 'reasonType': reason_type} - user_ref = unit.new_user_ref(domain_id=self.domain_id, - password=password) + reason_type = ( + exception.PasswordRequirementsValidationError.message_format + % {'detail': regex} + ) + expected_reason = {'reasonCode': '400', 'reasonType': reason_type} + user_ref = unit.new_user_ref( + domain_id=self.domain_id, password=password + ) user_ref = PROVIDERS.identity_api.create_user(user_ref) with self.make_request(): - self.assertRaises(exception.PasswordValidationError, - PROVIDERS.identity_api.change_password, - user_id=user_ref['id'], - original_password=password, - new_password=invalid_password) + self.assertRaises( + exception.PasswordValidationError, + PROVIDERS.identity_api.change_password, + user_id=user_ref['id'], + original_password=password, + new_password=invalid_password, + ) - self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER, - reason=expected_reason) + self._assert_last_audit( + user_ref['id'], + UPDATED_OPERATION, + 'user', + cadftaxonomy.SECURITY_ACCOUNT_USER, + reason=expected_reason, + ) def test_changing_password_too_early_sends_notification(self): password = uuid.uuid4().hex new_password = uuid.uuid4().hex 
next_password = uuid.uuid4().hex - user_ref = unit.new_user_ref(domain_id=self.domain_id, - password=password, - password_created_at=( - datetime.datetime.utcnow())) + user_ref = unit.new_user_ref( + domain_id=self.domain_id, + password=password, + password_created_at=(datetime.datetime.utcnow()), + ) user_ref = PROVIDERS.identity_api.create_user(user_ref) min_days = CONF.security_compliance.minimum_password_age - min_age = (user_ref['password_created_at'] + - datetime.timedelta(days=min_days)) + min_age = user_ref['password_created_at'] + datetime.timedelta( + days=min_days + ) days_left = (min_age - datetime.datetime.utcnow()).days - reason_type = (exception.PasswordAgeValidationError.message_format % - {'min_age_days': min_days, 'days_left': days_left}) - expected_reason = {'reasonCode': '400', - 'reasonType': reason_type} + reason_type = exception.PasswordAgeValidationError.message_format % { + 'min_age_days': min_days, + 'days_left': days_left, + } + expected_reason = {'reasonCode': '400', 'reasonType': reason_type} with self.make_request(): PROVIDERS.identity_api.change_password( user_id=user_ref['id'], - original_password=password, new_password=new_password + original_password=password, + new_password=new_password, ) with self.make_request(): - self.assertRaises(exception.PasswordValidationError, - PROVIDERS.identity_api.change_password, - user_id=user_ref['id'], - original_password=new_password, - new_password=next_password) + self.assertRaises( + exception.PasswordValidationError, + PROVIDERS.identity_api.change_password, + user_id=user_ref['id'], + original_password=new_password, + new_password=next_password, + ) - self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER, - reason=expected_reason) + self._assert_last_audit( + user_ref['id'], + UPDATED_OPERATION, + 'user', + cadftaxonomy.SECURITY_ACCOUNT_USER, + reason=expected_reason, + ) class CADFNotificationsForEntities(NotificationsForEntities): @@ 
-921,16 +1163,20 @@ class CADFNotificationsForEntities(NotificationsForEntities): ref = unit.new_domain_ref() resp = self.post('/domains', body={'domain': ref}) resource_id = resp.result.get('domain').get('id') - self._assert_last_audit(resource_id, CREATED_OPERATION, 'domain', - cadftaxonomy.SECURITY_DOMAIN) - self._assert_initiator_data_is_set(CREATED_OPERATION, - 'domain', - cadftaxonomy.SECURITY_DOMAIN) + self._assert_last_audit( + resource_id, + CREATED_OPERATION, + 'domain', + cadftaxonomy.SECURITY_DOMAIN, + ) + self._assert_initiator_data_is_set( + CREATED_OPERATION, 'domain', cadftaxonomy.SECURITY_DOMAIN + ) def test_initiator_request_id(self): data = self.build_authentication_request( - user_id=self.user_id, - password=self.user['password']) + user_id=self.user_id, password=self.user['password'] + ) self.post('/auth/tokens', body=data) audit = self._audits[-1] initiator = audit['payload']['initiator'] @@ -939,20 +1185,21 @@ class CADFNotificationsForEntities(NotificationsForEntities): def test_initiator_global_request_id(self): global_request_id = 'req-%s' % uuid.uuid4() data = self.build_authentication_request( - user_id=self.user_id, - password=self.user['password']) + user_id=self.user_id, password=self.user['password'] + ) self.post( - '/auth/tokens', body=data, - headers={'X-OpenStack-Request-Id': global_request_id}) + '/auth/tokens', + body=data, + headers={'X-OpenStack-Request-Id': global_request_id}, + ) audit = self._audits[-1] initiator = audit['payload']['initiator'] - self.assertEqual( - initiator['global_request_id'], global_request_id) + self.assertEqual(initiator['global_request_id'], global_request_id) def test_initiator_global_request_id_not_set(self): data = self.build_authentication_request( - user_id=self.user_id, - password=self.user['password']) + user_id=self.user_id, password=self.user['password'] + ) self.post('/auth/tokens', body=data) audit = self._audits[-1] initiator = audit['payload']['initiator'] @@ -963,8 +1210,9 @@ class 
TestEventCallbacks(test_v3.RestfulTestCase): class FakeManager(object): - def _project_deleted_callback(self, service, resource_type, operation, - payload): + def _project_deleted_callback( + self, service, resource_type, operation, payload + ): """Used just for the callback interface.""" def test_notification_received(self): @@ -975,19 +1223,23 @@ class TestEventCallbacks(test_v3.RestfulTestCase): def test_notification_method_not_callable(self): fake_method = None - self.assertRaises(TypeError, - notifications.register_event_callback, - UPDATED_OPERATION, - 'project', - [fake_method]) + self.assertRaises( + TypeError, + notifications.register_event_callback, + UPDATED_OPERATION, + 'project', + [fake_method], + ) def test_notification_event_not_valid(self): manager = self.FakeManager() - self.assertRaises(ValueError, - notifications.register_event_callback, - uuid.uuid4().hex, - 'project', - manager._project_deleted_callback) + self.assertRaises( + ValueError, + notifications.register_event_callback, + uuid.uuid4().hex, + 'project', + manager._project_deleted_callback, + ) def test_event_registration_for_unknown_resource_type(self): # Registration for unknown resource types should succeed. 
If no event @@ -998,12 +1250,12 @@ class TestEventCallbacks(test_v3.RestfulTestCase): notifications.register_event_callback( DELETED_OPERATION, uuid.uuid4().hex, - manager._project_deleted_callback) + manager._project_deleted_callback, + ) resource_type = uuid.uuid4().hex notifications.register_event_callback( - DELETED_OPERATION, - resource_type, - manager._project_deleted_callback) + DELETED_OPERATION, resource_type, manager._project_deleted_callback + ) def test_provider_event_callback_subscription(self): callback_called = [] @@ -1012,10 +1264,10 @@ class TestEventCallbacks(test_v3.RestfulTestCase): class Foo(object): def __init__(self): self.event_callbacks = { - CREATED_OPERATION: {'project': self.foo_callback}} + CREATED_OPERATION: {'project': self.foo_callback} + } - def foo_callback(self, service, resource_type, operation, - payload): + def foo_callback(self, service, resource_type, operation, payload): # uses callback_called from the closure callback_called.append(True) @@ -1032,7 +1284,9 @@ class TestEventCallbacks(test_v3.RestfulTestCase): def __init__(self): self.event_callbacks = { CREATED_OPERATION: { - 'project': [self.callback_0, self.callback_1]}} + 'project': [self.callback_0, self.callback_1] + } + } def callback_0(self, service, resource_type, operation, payload): # uses callback_called from the closure @@ -1075,8 +1329,9 @@ class TestEventCallbacks(test_v3.RestfulTestCase): @notifications.listener class Foo(object): def __init__(self): - self.event_callbacks = {CREATED_OPERATION: - {'project': Foo.callback}} + self.event_callbacks = { + CREATED_OPERATION: {'project': Foo.callback} + } def callback(self, service, resource_type, operation, payload): pass @@ -1086,8 +1341,12 @@ class TestEventCallbacks(test_v3.RestfulTestCase): # self.assertRaises(TypeError, Foo) Foo() project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.assertRaises(TypeError, PROVIDERS.resource_api.create_project, - project_ref['id'], project_ref) + 
self.assertRaises( + TypeError, + PROVIDERS.resource_api.create_project, + project_ref['id'], + project_ref, + ) class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): @@ -1100,8 +1359,15 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): super(CadfNotificationsWrapperTestCase, self).setUp() self._notifications = [] - def fake_notify(action, initiator, outcome, target, - event_type, reason=None, **kwargs): + def fake_notify( + action, + initiator, + outcome, + target, + event_type, + reason=None, + **kwargs + ): service_security = cadftaxonomy.SERVICE_SECURITY event = eventfactory.EventFactory().new_event( @@ -1111,7 +1377,8 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): initiator=initiator, target=target, reason=reason, - observer=cadfresource.Resource(typeURI=service_security)) + observer=cadfresource.Resource(typeURI=service_security), + ) for key, value in kwargs.items(): setattr(event, key, value) @@ -1121,11 +1388,15 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): 'initiator': initiator, 'event': event, 'event_type': event_type, - 'send_notification_called': True} + 'send_notification_called': True, + } self._notifications.append(note) - self.useFixture(fixtures.MockPatchObject( - notifications, '_send_audit_notification', fake_notify)) + self.useFixture( + fixtures.MockPatchObject( + notifications, '_send_audit_notification', fake_notify + ) + ) def _get_last_note(self): self.assertTrue(self._notifications) @@ -1142,8 +1413,15 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): if event_type: self.assertEqual(event_type, note['event_type']) - def _assert_event(self, role_id, project=None, domain=None, - user=None, group=None, inherit=False): + def _assert_event( + self, + role_id, + project=None, + domain=None, + user=None, + group=None, + inherit=False, + ): """Assert that the CADF event is valid. 
In the case of role assignments, the event will have extra data, @@ -1218,27 +1496,30 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): user_name = self.user['name'] password = self.user['password'] domain_id = self.domain_id - data = self.build_authentication_request(username=user_name, - user_domain_id=domain_id, - password=password) + data = self.build_authentication_request( + username=user_name, user_domain_id=domain_id, password=password + ) self.post('/auth/tokens', body=data) self._assert_last_note(self.ACTION, user_id) def test_v3_authenticate_user_id(self): user_id = self.user_id password = self.user['password'] - data = self.build_authentication_request(user_id=user_id, - password=password) + data = self.build_authentication_request( + user_id=user_id, password=password + ) self.post('/auth/tokens', body=data) self._assert_last_note(self.ACTION, user_id) def test_v3_authenticate_with_invalid_user_id_sends_notification(self): user_id = uuid.uuid4().hex password = self.user['password'] - data = self.build_authentication_request(user_id=user_id, - password=password) - self.post('/auth/tokens', body=data, - expected_status=http.client.UNAUTHORIZED) + data = self.build_authentication_request( + user_id=user_id, password=password + ) + self.post( + '/auth/tokens', body=data, expected_status=http.client.UNAUTHORIZED + ) note = self._get_last_note() initiator = note['initiator'] @@ -1253,11 +1534,12 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): user_name = uuid.uuid4().hex password = self.user['password'] domain_id = self.domain_id - data = self.build_authentication_request(username=user_name, - user_domain_id=domain_id, - password=password) - self.post('/auth/tokens', body=data, - expected_status=http.client.UNAUTHORIZED) + data = self.build_authentication_request( + username=user_name, user_domain_id=domain_id, password=password + ) + self.post( + '/auth/tokens', body=data, expected_status=http.client.UNAUTHORIZED + ) 
note = self._get_last_note() initiator = note['initiator'] @@ -1274,43 +1556,56 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): user_name = self.user['name'] password = self.user['password'] domain_name = self.domain['name'] - data = self.build_authentication_request(username=user_name, - user_domain_name=domain_name, - password=password) + data = self.build_authentication_request( + username=user_name, user_domain_name=domain_name, password=password + ) self.post('/auth/tokens', body=data) self._assert_last_note(self.ACTION, user_id) - def _test_role_assignment(self, url, role, project=None, domain=None, - user=None, group=None): + def _test_role_assignment( + self, url, role, project=None, domain=None, user=None, group=None + ): self.put(url) action = "%s.%s" % (CREATED_OPERATION, self.ROLE_ASSIGNMENT) - event_type = '%s.%s.%s' % (notifications.SERVICE, - self.ROLE_ASSIGNMENT, CREATED_OPERATION) + event_type = '%s.%s.%s' % ( + notifications.SERVICE, + self.ROLE_ASSIGNMENT, + CREATED_OPERATION, + ) self._assert_last_note(action, self.user_id, event_type) self._assert_event(role, project, domain, user, group) self.delete(url) action = "%s.%s" % (DELETED_OPERATION, self.ROLE_ASSIGNMENT) - event_type = '%s.%s.%s' % (notifications.SERVICE, - self.ROLE_ASSIGNMENT, DELETED_OPERATION) + event_type = '%s.%s.%s' % ( + notifications.SERVICE, + self.ROLE_ASSIGNMENT, + DELETED_OPERATION, + ) self._assert_last_note(action, self.user_id, event_type) self._assert_event(role, project, domain, user, None) def test_user_project_grant(self): - url = ('/projects/%s/users/%s/roles/%s' % - (self.project_id, self.user_id, self.role_id)) - self._test_role_assignment(url, self.role_id, - project=self.project_id, - user=self.user_id) + url = '/projects/%s/users/%s/roles/%s' % ( + self.project_id, + self.user_id, + self.role_id, + ) + self._test_role_assignment( + url, self.role_id, project=self.project_id, user=self.user_id + ) def test_group_domain_grant(self): 
group_ref = unit.new_group_ref(domain_id=self.domain_id) group = PROVIDERS.identity_api.create_group(group_ref) PROVIDERS.identity_api.add_user_to_group(self.user_id, group['id']) - url = ('/domains/%s/groups/%s/roles/%s' % - (self.domain_id, group['id'], self.role_id)) - self._test_role_assignment(url, self.role_id, - domain=self.domain_id, - group=group['id']) + url = '/domains/%s/groups/%s/roles/%s' % ( + self.domain_id, + group['id'], + self.role_id, + ) + self._test_role_assignment( + url, self.role_id, domain=self.domain_id, group=group['id'] + ) def test_add_role_to_user_and_project(self): # A notification is sent when add_role_to_user_and_project is called on @@ -1318,11 +1613,13 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): project_ref = unit.new_project_ref(self.domain_id) project = PROVIDERS.resource_api.create_project( - project_ref['id'], project_ref) + project_ref['id'], project_ref + ) project_id = project['id'] PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_id, project_id, self.role_id) + self.user_id, project_id, self.role_id + ) self.assertTrue(self._notifications) note = self._notifications[-1] @@ -1336,15 +1633,17 @@ class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): # called on the assignment manager. 
PROVIDERS.assignment_api.remove_role_from_user_and_project( - self.user_id, self.project_id, self.role_id) + self.user_id, self.project_id, self.role_id + ) self.assertTrue(self._notifications) note = self._notifications[-1] self.assertEqual('deleted.role_assignment', note['action']) self.assertTrue(note['send_notification_called']) - self._assert_event(self.role_id, project=self.project_id, - user=self.user_id) + self._assert_event( + self.role_id, project=self.project_id, user=self.user_id + ) class TestCallbackRegistration(unit.BaseTestCase): @@ -1375,12 +1674,13 @@ class TestCallbackRegistration(unit.BaseTestCase): resource_type = 'thing' with mock.patch('keystone.notifications.LOG', self.mock_log): notifications.register_event_callback( - CREATED_OPERATION, resource_type, callback) + CREATED_OPERATION, resource_type, callback + ) callback = 'keystone.tests.unit.common.test_notifications.callback' expected_log_data = { 'callback': callback, - 'event': 'identity.%s.created' % resource_type + 'event': 'identity.%s.created' % resource_type, } self.verify_log_message([expected_log_data]) @@ -1391,12 +1691,13 @@ class TestCallbackRegistration(unit.BaseTestCase): with mock.patch('keystone.notifications.LOG', self.mock_log): notifications.register_event_callback( - CREATED_OPERATION, 'thing', C().callback) + CREATED_OPERATION, 'thing', C().callback + ) callback = 'keystone.tests.unit.common.test_notifications.C.callback' expected_log_data = { 'callback': callback, - 'event': 'identity.thing.created' + 'event': 'identity.thing.created', } self.verify_log_message([expected_log_data]) @@ -1410,36 +1711,35 @@ class TestCallbackRegistration(unit.BaseTestCase): with mock.patch('keystone.notifications.LOG', self.mock_log): notifications.register_event_callback( - CREATED_OPERATION, 'thing', [callback, C().callback]) + CREATED_OPERATION, 'thing', [callback, C().callback] + ) callback_1 = 'keystone.tests.unit.common.test_notifications.callback' callback_2 = 
'keystone.tests.unit.common.test_notifications.C.callback' expected_log_data = [ - { - 'callback': callback_1, - 'event': 'identity.thing.created' - }, - { - 'callback': callback_2, - 'event': 'identity.thing.created' - }, + {'callback': callback_1, 'event': 'identity.thing.created'}, + {'callback': callback_2, 'event': 'identity.thing.created'}, ] self.verify_log_message(expected_log_data) def test_an_invalid_callback(self): - self.assertRaises(TypeError, - notifications.register_event_callback, - (CREATED_OPERATION, 'thing', object())) + self.assertRaises( + TypeError, + notifications.register_event_callback, + (CREATED_OPERATION, 'thing', object()), + ) def test_an_invalid_event(self): def callback(*args, **kwargs): pass - self.assertRaises(ValueError, - notifications.register_event_callback, - uuid.uuid4().hex, - 'thing', - callback) + self.assertRaises( + ValueError, + notifications.register_event_callback, + uuid.uuid4().hex, + 'thing', + callback, + ) class CADFNotificationsDataTestCase(test_v3.RestfulTestCase): @@ -1480,14 +1780,13 @@ class CADFNotificationsDataTestCase(test_v3.RestfulTestCase): outcome = 'success' event_type = 'identity.authenticate.created' - with mock.patch.object(notifications._get_notifier(), - 'info') as mocked: + with mock.patch.object( + notifications._get_notifier(), 'info' + ) as mocked: - notifications._send_audit_notification(action, - initiator, - outcome, - target, - event_type) + notifications._send_audit_notification( + action, initiator, outcome, target, event_type + ) for mock_args_list in mocked.call_args: if len(mock_args_list) != 0: diff --git a/keystone/tests/unit/common/test_provider_api.py b/keystone/tests/unit/common/test_provider_api.py index 2eff06f57d..5ebb28b448 100644 --- a/keystone/tests/unit/common/test_provider_api.py +++ b/keystone/tests/unit/common/test_provider_api.py @@ -42,7 +42,8 @@ class TestProviderAPIRegistry(unit.BaseTestCase): class TestClass(object): descriptor = 
provider_api.ProviderAPIs.deferred_provider_lookup( - api=api_name, method='do_something') + api=api_name, method='do_something' + ) test_instance = TestClass() # Accessing the descriptor will raise the known "attribute" error @@ -62,7 +63,8 @@ class TestProviderAPIRegistry(unit.BaseTestCase): self.assertRaises( provider_api.DuplicateProviderError, self._create_manager_instance, - provides_api=test_manager._provides_api) + provides_api=test_manager._provides_api, + ) def test_provider_api_mixin(self): test_manager = self._create_manager_instance() @@ -71,13 +73,14 @@ class TestProviderAPIRegistry(unit.BaseTestCase): pass instance = Testing() - self.assertIs(test_manager, getattr(instance, - test_manager._provides_api)) + self.assertIs( + test_manager, getattr(instance, test_manager._provides_api) + ) def test_manager_api_reference(self): manager = self._create_manager_instance() second_manager = self._create_manager_instance() - self.assertIs(second_manager, getattr(manager, - second_manager._provides_api)) - self.assertIs(manager, getattr(second_manager, - manager._provides_api)) + self.assertIs( + second_manager, getattr(manager, second_manager._provides_api) + ) + self.assertIs(manager, getattr(second_manager, manager._provides_api)) diff --git a/keystone/tests/unit/common/test_rbac_enforcer.py b/keystone/tests/unit/common/test_rbac_enforcer.py index b235eb29d3..1ef5ef367b 100644 --- a/keystone/tests/unit/common/test_rbac_enforcer.py +++ b/keystone/tests/unit/common/test_rbac_enforcer.py @@ -67,20 +67,26 @@ class _TestRBACEnforcerBase(rest.RestfulTestCase): rules = self._testing_policy_rules() enforcer.register_defaults(rules) - self.useFixture(fixtures.MockPatchObject( - self.enforcer, 'register_rules', register_new_rules)) + self.useFixture( + fixtures.MockPatchObject( + self.enforcer, 'register_rules', register_new_rules + ) + ) # Set the possible actions to our limited list original_actions = rbac_enforcer.enforcer._POSSIBLE_TARGET_ACTIONS - 
rbac_enforcer.enforcer._POSSIBLE_TARGET_ACTIONS = frozenset([ - rule.name for rule in self._testing_policy_rules()]) + rbac_enforcer.enforcer._POSSIBLE_TARGET_ACTIONS = frozenset( + [rule.name for rule in self._testing_policy_rules()] + ) # RESET the FrozenSet of possible target actions to the original # value - self.addCleanup(setattr, - rbac_enforcer.enforcer, - '_POSSIBLE_TARGET_ACTIONS', - original_actions) + self.addCleanup( + setattr, + rbac_enforcer.enforcer, + '_POSSIBLE_TARGET_ACTIONS', + original_actions, + ) # Force a reset on the enforcer to load up new policy rules. self.enforcer._reset() @@ -96,14 +102,13 @@ class _TestRBACEnforcerBase(rest.RestfulTestCase): def _driver_simulation_get_method(self, argument_id): user = self.user_req_admin - return {'id': argument_id, - 'value': 'TEST', - 'owner_id': user['id']} + return {'id': argument_id, 'value': 'TEST', 'owner_id': user['id']} def _setup_flask_restful_api(self): self.restful_api_url_prefix = '/_%s_TEST' % uuid.uuid4().hex - self.restful_api = flask_restful.Api(self.public_app.app, - self.restful_api_url_prefix) + self.restful_api = flask_restful.Api( + self.public_app.app, self.restful_api_url_prefix + ) driver_simulation_method = self._driver_simulation_get_method @@ -123,16 +128,19 @@ class _TestRBACEnforcerBase(rest.RestfulTestCase): self.restful_api_resource = RestfulResource self.restful_api.add_resource( - RestfulResource, '/argument/', '/argument') - self.cleanup_instance('restful_api', 'restful_resource', - 'restful_api_url_prefix') + RestfulResource, '/argument/', '/argument' + ) + self.cleanup_instance( + 'restful_api', 'restful_resource', 'restful_api_url_prefix' + ) def _register_blueprint_to_app(self): # TODO(morgan): remove the need for webtest, but for now just unwrap # by one layer. Once everything is converted to flask, we can fix # the tests to eliminate "webtest". 
self.public_app.app.register_blueprint( - self.flask_blueprint, url_prefix=self.url_prefix) + self.flask_blueprint, url_prefix=self.url_prefix + ) def _auth_json(self): return { @@ -143,17 +151,11 @@ class _TestRBACEnforcerBase(rest.RestfulTestCase): 'user': { 'name': self.user_req_admin['name'], 'password': self.user_req_admin['password'], - 'domain': { - 'id': self.user_req_admin['domain_id'] - } + 'domain': {'id': self.user_req_admin['domain_id']}, } - } + }, }, - 'scope': { - 'project': { - 'id': self.project_service['id'] - } - } + 'scope': {'project': {'id': self.project_service['id']}}, } } @@ -202,29 +204,37 @@ class TestRBACEnforcerRestAdminAuthToken(_TestRBACEnforcerBase): def test_enforcer_is_admin_check_with_token(self): # Admin-shared token passed and valid, "is_admin" should be true. with self.test_client() as c: - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={authorization.AUTH_TOKEN_HEADER: 'ADMIN'}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={authorization.AUTH_TOKEN_HEADER: 'ADMIN'}, + ) self.assertTrue(self.enforcer._shared_admin_auth_token_set()) def test_enforcer_is_admin_check_without_token(self): with self.test_client() as c: # Admin-shared token passed and invalid, "is_admin" should be false - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={authorization.AUTH_TOKEN_HEADER: 'BOGUS'}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={authorization.AUTH_TOKEN_HEADER: 'BOGUS'}, + ) self.assertFalse(self.enforcer._shared_admin_auth_token_set()) # Admin-shared token not passed, "is_admin" should be false - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex)) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex) + ) self.assertFalse(self.enforcer._shared_admin_auth_token_set()) def test_enforce_call_is_admin(self): with 
self.test_client() as c: - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={authorization.AUTH_TOKEN_HEADER: 'ADMIN'}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={authorization.AUTH_TOKEN_HEADER: 'ADMIN'}, + ) with mock.patch.object(self.enforcer, '_enforce') as mock_method: self.enforcer.enforce_call(action='example:allowed') mock_method.assert_not_called() @@ -241,21 +251,30 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): path, json=body, follow_redirects=True, - expected_status_code=201) + expected_status_code=201, + ) token_id = r.headers['X-Subject-Token'] - c.get('/v3', headers={'X-Auth-Token': token_id, - 'X-Subject-Token': token_id}) + c.get( + '/v3', + headers={ + 'X-Auth-Token': token_id, + 'X-Subject-Token': token_id, + }, + ) token = PROVIDER_APIS.token_provider_api.validate_token(token_id) subj_token_data = ( - self.enforcer._extract_subject_token_target_data()) + self.enforcer._extract_subject_token_target_data() + ) subj_token_data = subj_token_data['token'] self.assertEqual(token.user_id, subj_token_data['user_id']) self.assertIn('user', subj_token_data) self.assertIn('domain', subj_token_data['user']) - self.assertEqual(token.user_domain['id'], - subj_token_data['user']['domain']['id']) + self.assertEqual( + token.user_domain['id'], + subj_token_data['user']['domain']['id'], + ) def test_extract_filter_data(self): # Test that we are extracting useful filter data from the @@ -280,13 +299,14 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): # dict. qs = '%(expected)s=EXPECTED&%(unexpected)s=UNEXPECTED' % { 'expected': expected_param, - 'unexpected': unexpected_param + 'unexpected': unexpected_param, } # Perform the get with the query-string c.get('%(path)s?%(qs)s' % {'path': get_path, 'qs': qs}) # Extract the filter values. 
extracted_filter = self.enforcer._extract_filter_values( - [expected_param]) + [expected_param] + ) # Unexpected param is not in the extracted values # Expected param is in the extracted values # Expected param has the expected value @@ -299,8 +319,10 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): # from the environ as expected. The only way to really test is an # instance check. with self.test_client() as c: - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex)) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex) + ) oslo_req_context = self.enforcer._get_oslo_req_context() self.assertIsInstance(oslo_req_context, context.RequestContext) @@ -311,18 +333,22 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): with self.test_client() as c: r = c.post(token_path, json=auth_json, expected_status_code=201) token_id = r.headers.get('X-Subject-Token') - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={'X-Auth-Token': token_id}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={'X-Auth-Token': token_id}, + ) self.enforcer._assert_is_authenticated() c.get('/', expected_status_code=300) - self.assertRaises(exception.Unauthorized, - self.enforcer._assert_is_authenticated) + self.assertRaises( + exception.Unauthorized, self.enforcer._assert_is_authenticated + ) oslo_ctx = self.enforcer._get_oslo_req_context() # Set authenticated to a false value that is not None oslo_ctx.authenticated = False - self.assertRaises(exception.Unauthorized, - self.enforcer._assert_is_authenticated) + self.assertRaises( + exception.Unauthorized, self.enforcer._assert_is_authenticated + ) def test_extract_policy_check_credentials(self): # Make sure extracting the creds is the same as what is in the request @@ -332,13 +358,16 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): with self.test_client() as c: r = c.post(token_path, json=auth_json, 
expected_status_code=201) token_id = r.headers.get('X-Subject-Token') - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={'X-Auth-Token': token_id}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={'X-Auth-Token': token_id}, + ) extracted_creds = self.enforcer._extract_policy_check_credentials() self.assertEqual( flask.request.environ.get(authorization.AUTH_CONTEXT_ENV), - extracted_creds) + extracted_creds, + ) def test_extract_member_target_data_inferred(self): # NOTE(morgan): Setup the "resource" object with a 'member_name' attr @@ -356,12 +385,16 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): argument_id = uuid.uuid4().hex with self.test_client() as c: - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - argument_id)) + c.get( + '%s/argument/%s' % (self.restful_api_url_prefix, argument_id) + ) extracted = self.enforcer._extract_member_target_data( - member_target_type=None, member_target=None) - self.assertDictEqual(extracted['target'], - self.restful_api_resource().get(argument_id)) + member_target_type=None, member_target=None + ) + self.assertDictEqual( + extracted['target'], + self.restful_api_resource().get(argument_id), + ) def test_view_args_populated_in_policy_dict(self): # Setup the "resource" object and make a call that has view arguments @@ -369,13 +402,15 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): # that properly checks (substitutes in) a value that is not in "target" # path but in the main policy dict path. 
- def _enforce_mock_func(credentials, action, target, - do_raise=True): + def _enforce_mock_func(credentials, action, target, do_raise=True): if 'argument_id' not in target: raise exception.ForbiddenAction(action=action) - self.useFixture(fixtures.MockPatchObject( - self.enforcer, '_enforce', _enforce_mock_func)) + self.useFixture( + fixtures.MockPatchObject( + self.enforcer, '_enforce', _enforce_mock_func + ) + ) argument_id = uuid.uuid4().hex @@ -389,21 +424,27 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): path, json=body, follow_redirects=True, - expected_status_code=201) + expected_status_code=201, + ) token_id = r.headers['X-Subject-Token'] - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - argument_id), - headers={'X-Auth-Token': token_id}) + c.get( + '%s/argument/%s' % (self.restful_api_url_prefix, argument_id), + headers={'X-Auth-Token': token_id}, + ) # Use any valid policy as _enforce is mockpatched out self.enforcer.enforce_call(action='example:allowed') - c.get('%s/argument' % self.restful_api_url_prefix, - headers={'X-Auth-Token': token_id}) - self.assertRaises(exception.ForbiddenAction, - self.enforcer.enforce_call, - action='example:allowed') + c.get( + '%s/argument' % self.restful_api_url_prefix, + headers={'X-Auth-Token': token_id}, + ) + self.assertRaises( + exception.ForbiddenAction, + self.enforcer.enforce_call, + action='example:allowed', + ) def test_extract_member_target_data_supplied_target(self): # Test extract member target data with member_target and @@ -411,33 +452,45 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): member_type = uuid.uuid4().hex member_target = {uuid.uuid4().hex: {uuid.uuid4().hex}} extracted = self.enforcer._extract_member_target_data( - member_target_type=member_type, member_target=member_target) - self.assertDictEqual({'target': {member_type: member_target}}, - extracted) + member_target_type=member_type, member_target=member_target + ) + self.assertDictEqual( + {'target': {member_type: 
member_target}}, extracted + ) def test_extract_member_target_data_bad_input(self): # Test Extract Member Target Data with only "member_target" and only # "member_target_type" and ensure empty dict is returned. - self.assertEqual({}, self.enforcer._extract_member_target_data( - member_target=None, member_target_type=uuid.uuid4().hex)) - self.assertEqual({}, self.enforcer._extract_member_target_data( - member_target={}, member_target_type=None)) + self.assertEqual( + {}, + self.enforcer._extract_member_target_data( + member_target=None, member_target_type=uuid.uuid4().hex + ), + ) + self.assertEqual( + {}, + self.enforcer._extract_member_target_data( + member_target={}, member_target_type=None + ), + ) def test_call_build_enforcement_target(self): assertIn = self.assertIn assertEq = self.assertEqual ref_uuid = uuid.uuid4().hex - def _enforce_mock_func(credentials, action, target, - do_raise=True): + def _enforce_mock_func(credentials, action, target, do_raise=True): assertIn('target.domain.id', target) assertEq(target['target.domain.id'], ref_uuid) def _build_enforcement_target(): return {'domain': {'id': ref_uuid}} - self.useFixture(fixtures.MockPatchObject( - self.enforcer, '_enforce', _enforce_mock_func)) + self.useFixture( + fixtures.MockPatchObject( + self.enforcer, '_enforce', _enforce_mock_func + ) + ) argument_id = uuid.uuid4().hex @@ -449,16 +502,19 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): path, json=body, follow_redirects=True, - expected_status_code=201) + expected_status_code=201, + ) token_id = r.headers['X-Subject-Token'] - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - argument_id), - headers={'X-Auth-Token': token_id}) + c.get( + '%s/argument/%s' % (self.restful_api_url_prefix, argument_id), + headers={'X-Auth-Token': token_id}, + ) self.enforcer.enforce_call( action='example:allowed', - build_target=_build_enforcement_target) + build_target=_build_enforcement_target, + ) def test_policy_enforcer_action_decorator(self): # 
Create a method that has an action pre-registered @@ -474,7 +530,8 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): with self.test_client() as c: c.get('%s' % self.url_prefix) self.assertEqual( - action, getattr(flask.g, self.enforcer.ACTION_STORE_ATTR)) + action, getattr(flask.g, self.enforcer.ACTION_STORE_ATTR) + ) def test_policy_enforcer_action_invalid_action_decorator(self): # If the "action" is not a registered policy enforcement point, check @@ -492,27 +549,36 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): self.assertRaises(ValueError, _decorator_fails) def test_enforce_call_invalid_action(self): - self.assertRaises(exception.Forbidden, - self.enforcer.enforce_call, - action=uuid.uuid4().hex) + self.assertRaises( + exception.Forbidden, + self.enforcer.enforce_call, + action=uuid.uuid4().hex, + ) def test_enforce_call_not_is_authenticated(self): with self.test_client() as c: - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex)) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex) + ) # Patch the enforcer to return an empty oslo context. - with mock.patch.object(self.enforcer, '_get_oslo_req_context', - return_value=None): + with mock.patch.object( + self.enforcer, '_get_oslo_req_context', return_value=None + ): self.assertRaises( exception.Unauthorized, - self.enforcer.enforce_call, action='example:allowed') + self.enforcer.enforce_call, + action='example:allowed', + ) # Explicitly set "authenticated" on the context to false. 
ctx = self.enforcer._get_oslo_req_context() ctx.authenticated = False self.assertRaises( exception.Unauthorized, - self.enforcer.enforce_call, action='example:allowed') + self.enforcer.enforce_call, + action='example:allowed', + ) def test_enforce_call_explicit_target_attr(self): token_path = '/v3/auth/tokens' @@ -525,19 +591,25 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): # # TODO(morgan): confirm if subject-token-processing can/should # occur in this form without causing issues. - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={'X-Auth-Token': token_id, - 'X-Subject-Token': token_id}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={ + 'X-Auth-Token': token_id, + 'X-Subject-Token': token_id, + }, + ) target = {'myuser': {'id': self.user_req_admin['id']}} - self.enforcer.enforce_call(action='example:target', - target_attr=target) + self.enforcer.enforce_call( + action='example:target', target_attr=target + ) # Ensure extracting the subject-token data is not happening. self.assertRaises( exception.ForbiddenAction, self.enforcer.enforce_call, action='example:subject_token', - target_attr=target) + target_attr=target, + ) def test_enforce_call_with_subject_token_data(self): token_path = '/v3/auth/tokens' @@ -548,10 +620,14 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): # Check that the enforcer passes if user_id and subject token # user_id are the same. example:deprecated should also pass # since it is open enforcement. 
- c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={'X-Auth-Token': token_id, - 'X-Subject-Token': token_id}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={ + 'X-Auth-Token': token_id, + 'X-Subject-Token': token_id, + }, + ) self.enforcer.enforce_call(action='example:subject_token') def test_enforce_call_with_member_target_type_and_member_target(self): @@ -563,15 +639,21 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): # check the enforcer properly handles passed in member_target_type # and member_target. This form still extracts data from the subject # token. - c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={'X-Auth-Token': token_id, - 'X-Subject-Token': token_id}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={ + 'X-Auth-Token': token_id, + 'X-Subject-Token': token_id, + }, + ) target_type = 'myuser' target = {'id': self.user_req_admin['id']} - self.enforcer.enforce_call(action='example:target', - member_target_type=target_type, - member_target=target) + self.enforcer.enforce_call( + action='example:target', + member_target_type=target_type, + member_target=target, + ) # Ensure we're still extracting the subject-token data self.enforcer.enforce_call(action='example:subject_token') @@ -590,10 +672,14 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): token_id = r.headers.get('X-Subject-Token') # check the enforcer properly handles inferred member data get # This form still extracts data from the subject token. 
- c.get('%s/argument/%s' % (self.restful_api_url_prefix, - uuid.uuid4().hex), - headers={'X-Auth-Token': token_id, - 'X-Subject-Token': token_id}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={ + 'X-Auth-Token': token_id, + 'X-Subject-Token': token_id, + }, + ) self.enforcer.enforce_call(action='example:inferred_member_data') # Ensure we're still extracting the subject-token data self.enforcer.enforce_call(action='example:subject_token') @@ -606,37 +692,50 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): token_id = r.headers.get('X-Subject-Token') # Check that the enforcer passes if a filter is supplied *and* # the filter name is passed to enforce_call - c.get('%s/argument/%s?user=%s' % ( - self.restful_api_url_prefix, uuid.uuid4().hex, - self.user_req_admin['id']), - headers={'X-Auth-Token': token_id}) - self.enforcer.enforce_call(action='example:with_filter', - filters=['user']) + c.get( + '%s/argument/%s?user=%s' + % ( + self.restful_api_url_prefix, + uuid.uuid4().hex, + self.user_req_admin['id'], + ), + headers={'X-Auth-Token': token_id}, + ) + self.enforcer.enforce_call( + action='example:with_filter', filters=['user'] + ) # With No Filters passed into enforce_call self.assertRaises( exception.ForbiddenAction, self.enforcer.enforce_call, - action='example:with_filter') + action='example:with_filter', + ) # With No Filters in the PATH - c.get('%s/argument/%s' % ( - self.restful_api_url_prefix, uuid.uuid4().hex), - headers={'X-Auth-Token': token_id}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={'X-Auth-Token': token_id}, + ) self.assertRaises( exception.ForbiddenAction, self.enforcer.enforce_call, action='example:with_filter', - filters=['user']) + filters=['user'], + ) # With no filters in the path and no filters passed to enforce_call - c.get('%s/argument/%s' % ( - self.restful_api_url_prefix, uuid.uuid4().hex), - headers={'X-Auth-Token': token_id}) + c.get( + 
'%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={'X-Auth-Token': token_id}, + ) self.assertRaises( exception.ForbiddenAction, self.enforcer.enforce_call, - action='example:with_filter') + action='example:with_filter', + ) def test_enforce_call_with_pre_instantiated_enforcer(self): token_path = '/v3/auth/tokens' @@ -647,15 +746,20 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): token_id = r.headers.get('X-Subject-Token') # Check the enforcer behaves as expected with a pre-instantiated # enforcer passed into .enforce_call() - c.get('%s/argument/%s' % ( - self.restful_api_url_prefix, uuid.uuid4().hex), - headers={'X-Auth-Token': token_id}) - self.enforcer.enforce_call(action='example:allowed', - enforcer=enforcer) - self.assertRaises(exception.ForbiddenAction, - self.enforcer.enforce_call, - action='example:denied', - enforcer=enforcer) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={'X-Auth-Token': token_id}, + ) + self.enforcer.enforce_call( + action='example:allowed', enforcer=enforcer + ) + self.assertRaises( + exception.ForbiddenAction, + self.enforcer.enforce_call, + action='example:denied', + enforcer=enforcer, + ) def test_enforce_call_sets_enforcement_attr(self): # Ensure calls to enforce_call set the value on flask.g that indicates @@ -667,33 +771,44 @@ class TestRBACEnforcerRest(_TestRBACEnforcerBase): # app to have access to g (without an explicit app-context push) r = c.post(token_path, json=auth_json, expected_status_code=201) token_id = r.headers.get('X-Subject-Token') - c.get('%s/argument/%s' % ( - self.restful_api_url_prefix, uuid.uuid4().hex), - headers={'X-Auth-Token': token_id}) + c.get( + '%s/argument/%s' + % (self.restful_api_url_prefix, uuid.uuid4().hex), + headers={'X-Auth-Token': token_id}, + ) # Ensure the attribute is not set self.assertFalse( hasattr( - flask.g, rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR) + flask.g, 
rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR + ) ) # Set the value to false, like the resource have done automatically setattr( - flask.g, rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR, False) + flask.g, rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR, False + ) # Enforce self.enforcer.enforce_call(action='example:allowed') # Verify the attribute has been set to true. self.assertEqual( - getattr(flask.g, - rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR), - True) + getattr( + flask.g, rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR + ), + True, + ) # Reset Attribute and check that attribute is still set even if # enforcement results in forbidden. setattr( - flask.g, rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR, False) - self.assertRaises(exception.ForbiddenAction, - self.enforcer.enforce_call, - action='example:denied') + flask.g, rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR, False + ) + self.assertRaises( + exception.ForbiddenAction, + self.enforcer.enforce_call, + action='example:denied', + ) self.assertEqual( - getattr(flask.g, - rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR), - True) + getattr( + flask.g, rbac_enforcer.enforcer._ENFORCEMENT_CHECK_ATTR + ), + True, + ) diff --git a/keystone/tests/unit/common/test_resource_options_common.py b/keystone/tests/unit/common/test_resource_options_common.py index 986956a6fe..adb0d868ab 100644 --- a/keystone/tests/unit/common/test_resource_options_common.py +++ b/keystone/tests/unit/common/test_resource_options_common.py @@ -19,16 +19,17 @@ from keystone.tests import unit class TestResourceOptionObjects(unit.BaseTestCase): def test_option_init_validation(self): # option_name must be a string - self.assertRaises(TypeError, - resource_options.ResourceOption, 'test', 1234) + self.assertRaises( + TypeError, resource_options.ResourceOption, 'test', 1234 + ) # option_id must be a string - self.assertRaises(TypeError, - resource_options.ResourceOption, 1234, 'testing') + self.assertRaises( + TypeError, 
resource_options.ResourceOption, 1234, 'testing' + ) # option_id must be 4 characters - self.assertRaises(ValueError, - resource_options.ResourceOption, - 'testing', - 'testing') + self.assertRaises( + ValueError, resource_options.ResourceOption, 'testing', 'testing' + ) resource_options.ResourceOption('test', 'testing') def test_duplicate_option_cases(self): @@ -37,26 +38,32 @@ class TestResourceOptionObjects(unit.BaseTestCase): option_name_unique = uuid.uuid4().hex option = resource_options.ResourceOption( - option_id_str_valid, option_name_unique) + option_id_str_valid, option_name_unique + ) option_dup_id = resource_options.ResourceOption( - option_id_str_valid, uuid.uuid4().hex) + option_id_str_valid, uuid.uuid4().hex + ) option_dup_name = resource_options.ResourceOption( - uuid.uuid4().hex[:4], option_name_unique) + uuid.uuid4().hex[:4], option_name_unique + ) registry.register_option(option) self.assertRaises(ValueError, registry.register_option, option_dup_id) - self.assertRaises(ValueError, registry.register_option, - option_dup_name) + self.assertRaises( + ValueError, registry.register_option, option_dup_name + ) self.assertIs(1, len(registry.options)) registry.register_option(option) self.assertIs(1, len(registry.options)) def test_registry(self): - option = resource_options.ResourceOption(uuid.uuid4().hex[:4], - uuid.uuid4().hex) - option2 = resource_options.ResourceOption(uuid.uuid4().hex[:4], - uuid.uuid4().hex) + option = resource_options.ResourceOption( + uuid.uuid4().hex[:4], uuid.uuid4().hex + ) + option2 = resource_options.ResourceOption( + uuid.uuid4().hex[:4], uuid.uuid4().hex + ) registry = resource_options.ResourceOptionRegistry('TEST') registry.register_option(option) @@ -67,11 +74,9 @@ class TestResourceOptionObjects(unit.BaseTestCase): self.assertIn(option2.option_name, registry.option_names) self.assertIs(2, len(registry.options)) self.assertIn(option2.option_id, registry.option_ids) - self.assertIs(option, - 
registry.get_option_by_id(option.option_id)) - self.assertIs(option2, - registry.get_option_by_id(option2.option_id)) - self.assertIs(option, - registry.get_option_by_name(option.option_name)) - self.assertIs(option2, - registry.get_option_by_name(option2.option_name)) + self.assertIs(option, registry.get_option_by_id(option.option_id)) + self.assertIs(option2, registry.get_option_by_id(option2.option_id)) + self.assertIs(option, registry.get_option_by_name(option.option_name)) + self.assertIs( + option2, registry.get_option_by_name(option2.option_name) + ) diff --git a/keystone/tests/unit/common/test_utils.py b/keystone/tests/unit/common/test_utils.py index 163eb02458..dfd4b73bac 100644 --- a/keystone/tests/unit/common/test_utils.py +++ b/keystone/tests/unit/common/test_utils.py @@ -63,8 +63,7 @@ class UtilsTestCase(unit.BaseTestCase): def test_resource_invalid_id(self): # This input is invalid because it's length is more than 64. value = u'x' * 65 - self.assertRaises(ValueError, common_utils.resource_uuid, - value) + self.assertRaises(ValueError, common_utils.resource_uuid, value) def test_hash(self): password = 'right' @@ -88,8 +87,9 @@ class UtilsTestCase(unit.BaseTestCase): password = uuid.uuid4().hex hashed_password = common_utils.hash_password(password) new_hashed_password = common_utils.hash_password(hashed_password) - self.assertFalse(common_utils.check_password(password, - new_hashed_password)) + self.assertFalse( + common_utils.check_password(password, new_hashed_password) + ) def test_verify_long_password_strict(self): self.config_fixture.config(strict_password_check=False) @@ -103,9 +103,11 @@ class UtilsTestCase(unit.BaseTestCase): self.config_fixture.config(strict_password_check=True) self.config_fixture.config(group='identity', max_password_length=5) invalid_password = 'passw0rd' - self.assertRaises(exception.PasswordVerificationError, - common_utils.verify_length_and_trunc_password, - invalid_password) + self.assertRaises( + 
exception.PasswordVerificationError, + common_utils.verify_length_and_trunc_password, + invalid_password, + ) def test_verify_length_and_trunc_password_throws_validation_error(self): class SpecialObject(object): @@ -117,41 +119,48 @@ class UtilsTestCase(unit.BaseTestCase): self.assertRaises( exception.ValidationError, common_utils.verify_length_and_trunc_password, - invalid_password + invalid_password, ) def test_hash_long_password_truncation(self): self.config_fixture.config(strict_password_check=False) invalid_length_password = '0' * 9999999 hashed = common_utils.hash_password(invalid_length_password) - self.assertTrue(common_utils.check_password(invalid_length_password, - hashed)) + self.assertTrue( + common_utils.check_password(invalid_length_password, hashed) + ) def test_hash_long_password_strict(self): self.config_fixture.config(strict_password_check=True) invalid_length_password = '0' * 9999999 - self.assertRaises(exception.PasswordVerificationError, - common_utils.hash_password, - invalid_length_password) + self.assertRaises( + exception.PasswordVerificationError, + common_utils.hash_password, + invalid_length_password, + ) def test_max_algo_length_truncates_password(self): self.config_fixture.config(strict_password_check=True) - self.config_fixture.config(group='identity', - password_hash_algorithm='bcrypt') - self.config_fixture.config(group='identity', - max_password_length='96') + self.config_fixture.config( + group='identity', password_hash_algorithm='bcrypt' + ) + self.config_fixture.config(group='identity', max_password_length='96') invalid_length_password = '0' * 96 - self.assertRaises(exception.PasswordVerificationError, - common_utils.hash_password, - invalid_length_password) + self.assertRaises( + exception.PasswordVerificationError, + common_utils.hash_password, + invalid_length_password, + ) def test_bcrypt_sha256_not_truncate_password(self): self.config_fixture.config(strict_password_check=True) - self.config_fixture.config(group='identity', 
- password_hash_algorithm='bcrypt_sha256') + self.config_fixture.config( + group='identity', password_hash_algorithm='bcrypt_sha256' + ) password = '0' * 128 - password_verified = \ - common_utils.verify_length_and_trunc_password(password) + password_verified = common_utils.verify_length_and_trunc_password( + password + ) hashed = common_utils.hash_password(password) self.assertTrue(common_utils.check_password(password, hashed)) self.assertEqual(password.encode('utf-8'), password_verified) @@ -246,8 +255,9 @@ class UtilsTestCase(unit.BaseTestCase): country_name='cn', user_id='user_id', domain_component='test.com', - email_address='user@test.com' - )) + email_address='user@test.com', + ) + ) dn = common_utils.get_certificate_subject_dn(cert_pem) self.assertEqual('test', dn.get('CN')) @@ -267,8 +277,9 @@ class UtilsTestCase(unit.BaseTestCase): locality_name='kawasaki', organization_name='fujitsu', organizational_unit_name='test', - common_name='root' - )) + common_name='root', + ) + ) cert_pem = unit.create_pem_certificate( unit.create_dn( @@ -279,8 +290,11 @@ class UtilsTestCase(unit.BaseTestCase): country_name='cn', user_id='user_id', domain_component='test.com', - email_address='user@test.com' - ), ca=root_cert, ca_key=root_key) + email_address='user@test.com', + ), + ca=root_cert, + ca_key=root_key, + ) dn = common_utils.get_certificate_subject_dn(cert_pem) self.assertEqual('test', dn.get('CN')) @@ -304,13 +318,15 @@ class UtilsTestCase(unit.BaseTestCase): self.assertRaises( exception.ValidationError, common_utils.get_certificate_subject_dn, - 'MIIEkTCCAnkCFDIzsgpdRGF//5ukMuueXnRxQALhMA0GCSqGSIb3DQEBCwUAMIGC') + 'MIIEkTCCAnkCFDIzsgpdRGF//5ukMuueXnRxQALhMA0GCSqGSIb3DQEBCwUAMIGC', + ) def test_get_certificate_issuer_dn_not_pem_format(self): self.assertRaises( exception.ValidationError, common_utils.get_certificate_issuer_dn, - 'MIIEkTCCAnkCFDIzsgpdRGF//5ukMuueXnRxQALhMA0GCSqGSIb3DQEBCwUAMIGC') + 'MIIEkTCCAnkCFDIzsgpdRGF//5ukMuueXnRxQALhMA0GCSqGSIb3DQEBCwUAMIGC', + 
) def test_get_certificate_thumbprint(self): cert_pem = '''-----BEGIN CERTIFICATE----- @@ -341,8 +357,9 @@ class UtilsTestCase(unit.BaseTestCase): 4cJHNiTQl8bxfSgwemgSYnnyXM4k -----END CERTIFICATE-----''' thumbprint = common_utils.get_certificate_thumbprint(cert_pem) - self.assertEqual('dMmoJKE9MIJK9VcyahYCb417JDhDfdtTiq_krco8-tk=', - thumbprint) + self.assertEqual( + 'dMmoJKE9MIJK9VcyahYCb417JDhDfdtTiq_krco8-tk=', thumbprint + ) class ServiceHelperTests(unit.BaseTestCase): @@ -366,22 +383,24 @@ class FernetUtilsTestCase(unit.BaseTestCase): ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) logging_fixture = self.useFixture(fixtures.FakeLogger(level=log.DEBUG)) fernet_utilities = fernet_utils.FernetUtils( CONF.fernet_tokens.key_repository, CONF.fernet_tokens.max_active_keys, - 'fernet_tokens' + 'fernet_tokens', ) fernet_utilities.load_keys() expected_debug_message = ( 'Loaded 2 Fernet keys from %(dir)s, but `[fernet_tokens] ' 'max_active_keys = %(max)d`; perhaps there have not been enough ' - 'key rotations to reach `max_active_keys` yet?') % { - 'dir': CONF.fernet_tokens.key_repository, - 'max': CONF.fernet_tokens.max_active_keys} + 'key rotations to reach `max_active_keys` yet?' 
+ ) % { + 'dir': CONF.fernet_tokens.key_repository, + 'max': CONF.fernet_tokens.max_active_keys, + } self.assertIn(expected_debug_message, logging_fixture.output) def test_debug_message_not_logged_when_loading_fernet_credential_key(self): @@ -389,20 +408,22 @@ class FernetUtilsTestCase(unit.BaseTestCase): ksfixtures.KeyRepository( self.config_fixture, 'credential', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) logging_fixture = self.useFixture(fixtures.FakeLogger(level=log.DEBUG)) fernet_utilities = fernet_utils.FernetUtils( CONF.credential.key_repository, credential_fernet.MAX_ACTIVE_KEYS, - 'credential' + 'credential', ) fernet_utilities.load_keys() debug_message = ( 'Loaded 2 Fernet keys from %(dir)s, but `[credential] ' 'max_active_keys = %(max)d`; perhaps there have not been enough ' - 'key rotations to reach `max_active_keys` yet?') % { - 'dir': CONF.credential.key_repository, - 'max': credential_fernet.MAX_ACTIVE_KEYS} + 'key rotations to reach `max_active_keys` yet?' + ) % { + 'dir': CONF.credential.key_repository, + 'max': credential_fernet.MAX_ACTIVE_KEYS, + } self.assertNotIn(debug_message, logging_fixture.output) diff --git a/keystone/tests/unit/contrib/federation/test_utils.py b/keystone/tests/unit/contrib/federation/test_utils.py index ec9025bb26..33dc9e1d00 100644 --- a/keystone/tests/unit/contrib/federation/test_utils.py +++ b/keystone/tests/unit/contrib/federation/test_utils.py @@ -40,16 +40,19 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.flask_app = flask.Flask(__name__) self.cleanup_instance('flask_app') - def assertValidMappedUserObject(self, mapped_properties, - user_type='ephemeral', - domain_id=None): + def assertValidMappedUserObject( + self, mapped_properties, user_type='ephemeral', domain_id=None + ): """Check whether mapped properties object has 'user' within. According to today's rules, RuleProcessor does not have to issue user's id or name. What's actually required is user's type. 
""" - self.assertIn('user', mapped_properties, - message='Missing user object in mapped properties') + self.assertIn( + 'user', + mapped_properties, + message='Missing user object in mapped properties', + ) user = mapped_properties['user'] self.assertIn('type', user) self.assertEqual(user_type, user['type']) @@ -93,9 +96,7 @@ class MappingRuleEngineTests(unit.BaseTestCase): mapping = mapping_fixtures.MAPPING_LARGE assertion = mapping_fixtures.BAD_TESTER_ASSERTION rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - self.assertRaises(exception.ValidationError, - rp.process, - assertion) + self.assertRaises(exception.ValidationError, rp.process, assertion) def test_rule_engine_regex_many_groups(self): """Should return group CONTRACTOR_GROUP_ID. @@ -160,7 +161,10 @@ class MappingRuleEngineTests(unit.BaseTestCase): name = values.get('user', {}).get('name') self.assertEqual(user_name, name) - self.assertEqual([], group_ids,) + self.assertEqual( + [], + group_ids, + ) def test_rule_engine_not_any_of_many_rules(self): """Should return group EMPLOYEE_GROUP_ID. @@ -217,9 +221,7 @@ class MappingRuleEngineTests(unit.BaseTestCase): mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX assertion = mapping_fixtures.BAD_DEVELOPER_ASSERTION rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - self.assertRaises(exception.ValidationError, - rp.process, - assertion) + self.assertRaises(exception.ValidationError, rp.process, assertion) def _rule_engine_regex_match_and_many_groups(self, assertion): """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID. @@ -251,7 +253,8 @@ class MappingRuleEngineTests(unit.BaseTestCase): """ self._rule_engine_regex_match_and_many_groups( - mapping_fixtures.TESTER_ASSERTION) + mapping_fixtures.TESTER_ASSERTION + ) def test_rule_engine_discards_nonstring_objects(self): """Check whether RuleProcessor discards non string objects. 
@@ -262,7 +265,8 @@ class MappingRuleEngineTests(unit.BaseTestCase): """ self._rule_engine_regex_match_and_many_groups( - mapping_fixtures.MALFORMED_TESTER_ASSERTION) + mapping_fixtures.MALFORMED_TESTER_ASSERTION + ) def test_rule_engine_regex_blacklist(self): mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_REGEX @@ -275,9 +279,11 @@ class MappingRuleEngineTests(unit.BaseTestCase): 'projects': [], 'group_ids': [], 'group_names': [ - {'name': 'Manager', 'domain': { - 'id': mapping_fixtures.FEDERATED_DOMAIN}} - ] + { + 'name': 'Manager', + 'domain': {'id': mapping_fixtures.FEDERATED_DOMAIN}, + } + ], } self.assertEqual(expected, mapped) @@ -293,11 +299,15 @@ class MappingRuleEngineTests(unit.BaseTestCase): 'projects': [], 'group_ids': [], 'group_names': [ - {'name': 'Employee', 'domain': { - 'id': mapping_fixtures.FEDERATED_DOMAIN}}, - {'name': 'PartTimeEmployee', 'domain': { - 'id': mapping_fixtures.FEDERATED_DOMAIN}} - ] + { + 'name': 'Employee', + 'domain': {'id': mapping_fixtures.FEDERATED_DOMAIN}, + }, + { + 'name': 'PartTimeEmployee', + 'domain': {'id': mapping_fixtures.FEDERATED_DOMAIN}, + }, + ], } self.assertEqual(expected, mapped) @@ -313,9 +323,7 @@ class MappingRuleEngineTests(unit.BaseTestCase): mapping = mapping_fixtures.MAPPING_SMALL rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) assertion = mapping_fixtures.CONTRACTOR_MALFORMED_ASSERTION - self.assertRaises(exception.ValidationError, - rp.process, - assertion) + self.assertRaises(exception.ValidationError, rp.process, assertion) def test_using_remote_direct_mapping_that_doesnt_exist_fails(self): """Test for the correct error when referring to a bad remote match. 
@@ -327,9 +335,7 @@ class MappingRuleEngineTests(unit.BaseTestCase): rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) assertion = mapping_fixtures.CUSTOMER_ASSERTION - self.assertRaises(exception.DirectMappingError, - rp.process, - assertion) + self.assertRaises(exception.DirectMappingError, rp.process, assertion) def test_rule_engine_returns_group_names(self): """Check whether RuleProcessor returns group names with their domains. @@ -346,20 +352,16 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertIsNotNone(mapped_properties) self.assertValidMappedUserObject(mapped_properties) reference = { - mapping_fixtures.DEVELOPER_GROUP_NAME: - { + mapping_fixtures.DEVELOPER_GROUP_NAME: { "name": mapping_fixtures.DEVELOPER_GROUP_NAME, "domain": { "name": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_NAME - } + }, }, - mapping_fixtures.TESTER_GROUP_NAME: - { + mapping_fixtures.TESTER_GROUP_NAME: { "name": mapping_fixtures.TESTER_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } - } + "domain": {"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID}, + }, } for rule in mapped_properties['group_names']: self.assertDictEqual(reference.get(rule.get('name')), rule) @@ -379,20 +381,14 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertIsNotNone(mapped_properties) reference = { - mapping_fixtures.DEVELOPER_GROUP_NAME: - { + mapping_fixtures.DEVELOPER_GROUP_NAME: { "name": mapping_fixtures.DEVELOPER_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } + "domain": {"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID}, }, - mapping_fixtures.CONTRACTOR_GROUP_NAME: - { + mapping_fixtures.CONTRACTOR_GROUP_NAME: { "name": mapping_fixtures.CONTRACTOR_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } - } + "domain": {"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID}, + }, } for rule in mapped_properties['group_names']: self.assertDictEqual(reference.get(rule.get('name')), 
rule) @@ -415,12 +411,9 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertIsNotNone(mapped_properties) reference = { - mapping_fixtures.CONTRACTOR_GROUP_NAME: - { + mapping_fixtures.CONTRACTOR_GROUP_NAME: { "name": mapping_fixtures.CONTRACTOR_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } + "domain": {"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID}, } } for rule in mapped_properties['group_names']: @@ -443,12 +436,9 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertIsNotNone(mapped_properties) reference = { - mapping_fixtures.CONTRACTOR_GROUP_NAME: - { + mapping_fixtures.CONTRACTOR_GROUP_NAME: { "name": mapping_fixtures.CONTRACTOR_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } + "domain": {"id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID}, } } for rule in mapped_properties['group_names']: @@ -532,8 +522,10 @@ class MappingRuleEngineTests(unit.BaseTestCase): mapped_properties = rp.process(assertion) self.assertIsNotNone(mapped_properties) self.assertValidMappedUserObject( - mapped_properties, user_type='local', - domain_id=mapping_fixtures.LOCAL_DOMAIN) + mapped_properties, + user_type='local', + domain_id=mapping_fixtures.LOCAL_DOMAIN, + ) def test_user_identifications_name(self): """Test various mapping options and how users are identified. 
@@ -557,16 +549,18 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertEqual('jsmith', mapped_properties['user']['name']) resource_api_mock = mock.patch( - 'keystone.resource.core.DomainConfigManager') + 'keystone.resource.core.DomainConfigManager' + ) idp_domain_id = uuid.uuid4().hex - mapped.validate_and_prepare_federated_user(mapped_properties, - idp_domain_id, - resource_api_mock) + mapped.validate_and_prepare_federated_user( + mapped_properties, idp_domain_id, resource_api_mock + ) self.assertEqual('jsmith', mapped_properties['user']['id']) self.assertEqual('jsmith', mapped_properties['user']['name']) - self.assertEqual(idp_domain_id, - mapped_properties['user']['domain']['id']) + self.assertEqual( + idp_domain_id, mapped_properties['user']['domain']['id'] + ) def test_user_identifications_name_and_federated_domain(self): """Test various mapping options and how users are identified. @@ -588,18 +582,21 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertValidMappedUserObject(mapped_properties) resource_api_mock = mock.patch( - 'keystone.resource.core.DomainConfigManager') + 'keystone.resource.core.DomainConfigManager' + ) idp_domain_id = uuid.uuid4().hex user_domain_id = mapped_properties['user']['domain']['id'] - mapped.validate_and_prepare_federated_user(mapped_properties, - idp_domain_id, - resource_api_mock) + mapped.validate_and_prepare_federated_user( + mapped_properties, idp_domain_id, resource_api_mock + ) self.assertEqual('tbo', mapped_properties['user']['name']) - self.assertEqual('abc123%40example.com', - mapped_properties['user']['id']) - self.assertEqual(user_domain_id, - mapped_properties['user']['domain']['id']) + self.assertEqual( + 'abc123%40example.com', mapped_properties['user']['id'] + ) + self.assertEqual( + user_domain_id, mapped_properties['user']['domain']['id'] + ) def test_user_identification_id(self): """Test various mapping options and how users are identified. 
@@ -621,16 +618,18 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertValidMappedUserObject(mapped_properties) with self.flask_app.test_request_context(): resource_api_mock = mock.patch( - 'keystone.resource.core.DomainConfigManager') + 'keystone.resource.core.DomainConfigManager' + ) idp_domain_id = uuid.uuid4().hex - mapped.validate_and_prepare_federated_user(mapped_properties, - idp_domain_id, - resource_api_mock) + mapped.validate_and_prepare_federated_user( + mapped_properties, idp_domain_id, resource_api_mock + ) self.assertEqual('bob', mapped_properties['user']['name']) self.assertEqual('bob', mapped_properties['user']['id']) - self.assertEqual(idp_domain_id, - mapped_properties['user']['domain']['id']) + self.assertEqual( + idp_domain_id, mapped_properties['user']['domain']['id'] + ) def test_get_user_unique_id_and_display_name(self): @@ -642,18 +641,21 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertIsNotNone(mapped_properties) self.assertValidMappedUserObject(mapped_properties) with self.flask_app.test_request_context( - environ_base={'REMOTE_USER': 'remote_user'}): + environ_base={'REMOTE_USER': 'remote_user'} + ): resource_api_mock = mock.patch( - 'keystone.resource.core.DomainConfigManager') + 'keystone.resource.core.DomainConfigManager' + ) idp_domain_id = uuid.uuid4().hex - mapped.validate_and_prepare_federated_user(mapped_properties, - idp_domain_id, - resource_api_mock) + mapped.validate_and_prepare_federated_user( + mapped_properties, idp_domain_id, resource_api_mock + ) self.assertEqual('remote_user', mapped_properties['user']['name']) self.assertEqual('bob', mapped_properties['user']['id']) - self.assertEqual(idp_domain_id, - mapped_properties['user']['domain']['id']) + self.assertEqual( + idp_domain_id, mapped_properties['user']['domain']['id'] + ) def test_user_identification_id_and_name(self): """Test various mapping options and how users are identified. 
@@ -676,8 +678,10 @@ class MappingRuleEngineTests(unit.BaseTestCase): not to change it. """ - testcases = [(mapping_fixtures.CUSTOMER_ASSERTION, 'bwilliams'), - (mapping_fixtures.EMPLOYEE_ASSERTION, 'tbo')] + testcases = [ + (mapping_fixtures.CUSTOMER_ASSERTION, 'bwilliams'), + (mapping_fixtures.EMPLOYEE_ASSERTION, 'tbo'), + ] for assertion, exp_user_name in testcases: mapping = mapping_fixtures.MAPPING_USER_IDS rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) @@ -686,18 +690,21 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertValidMappedUserObject(mapped_properties) resource_api_mock = mock.patch( - 'keystone.resource.core.DomainConfigManager') + 'keystone.resource.core.DomainConfigManager' + ) idp_domain_id = uuid.uuid4().hex user_domain_id = mapped_properties['user']['domain']['id'] - mapped.validate_and_prepare_federated_user(mapped_properties, - idp_domain_id, - resource_api_mock) + mapped.validate_and_prepare_federated_user( + mapped_properties, idp_domain_id, resource_api_mock + ) self.assertEqual(exp_user_name, mapped_properties['user']['name']) - self.assertEqual('abc123%40example.com', - mapped_properties['user']['id']) - self.assertEqual(user_domain_id, - mapped_properties['user']['domain']['id']) + self.assertEqual( + 'abc123%40example.com', mapped_properties['user']['id'] + ) + self.assertEqual( + user_domain_id, mapped_properties['user']['domain']['id'] + ) def test_whitelist_pass_through(self): mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH @@ -707,14 +714,17 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertValidMappedUserObject(mapped_properties) self.assertEqual('developacct', mapped_properties['user']['name']) - self.assertEqual('Developer', - mapped_properties['group_names'][0]['name']) + self.assertEqual( + 'Developer', mapped_properties['group_names'][0]['name'] + ) def test_mapping_validation_with_incorrect_local_keys(self): mapping = mapping_fixtures.MAPPING_BAD_LOCAL_SETUP - 
self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + mapping_utils.validate_mapping_structure, + mapping, + ) def test_mapping_validation_with_user_name_and_domain_name(self): mapping = mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME @@ -734,60 +744,74 @@ class MappingRuleEngineTests(unit.BaseTestCase): def test_mapping_validation_bad_domain(self): mapping = mapping_fixtures.MAPPING_BAD_DOMAIN - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + mapping_utils.validate_mapping_structure, + mapping, + ) def test_mapping_validation_bad_group(self): mapping = mapping_fixtures.MAPPING_BAD_GROUP - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + mapping_utils.validate_mapping_structure, + mapping, + ) def test_mapping_validation_with_group_name_without_domain(self): mapping = mapping_fixtures.MAPPING_GROUP_NAME_WITHOUT_DOMAIN - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + mapping_utils.validate_mapping_structure, + mapping, + ) def test_mapping_validation_with_group_id_and_domain(self): mapping = mapping_fixtures.MAPPING_GROUP_ID_WITH_DOMAIN - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + mapping_utils.validate_mapping_structure, + mapping, + ) def test_mapping_validation_with_bad_local_type_user_in_assertion(self): mapping = mapping_fixtures.MAPPING_BAD_LOCAL_TYPE_USER_IN_ASSERTION - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + 
mapping_utils.validate_mapping_structure, + mapping, + ) def test_mapping_validation_no_local(self): mapping = mapping_fixtures.MAPPING_MISSING_LOCAL - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + mapping_utils.validate_mapping_structure, + mapping, + ) def test_mapping_validataion_no_remote(self): mapping = mapping_fixtures.MAPPING_NO_REMOTE - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + mapping_utils.validate_mapping_structure, + mapping, + ) def test_mapping_validation_no_type(self): mapping = mapping_fixtures.MAPPING_MISSING_TYPE - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) + self.assertRaises( + exception.ValidationError, + mapping_utils.validate_mapping_structure, + mapping, + ) def test_type_not_in_assertion(self): """Test that if the remote "type" is not in the assertion it fails.""" mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) assertion = {uuid.uuid4().hex: uuid.uuid4().hex} - self.assertRaises(exception.ValidationError, - rp.process, - assertion) + self.assertRaises(exception.ValidationError, rp.process, assertion) def test_rule_engine_groups_mapping_only_one_group(self): """Test mapping engine when groups is explicitly set. 
@@ -802,10 +826,12 @@ class MappingRuleEngineTests(unit.BaseTestCase): mapped_properties = rp.process(assertion) self.assertIsNotNone(mapped_properties) self.assertEqual('jsmith', mapped_properties['user']['name']) - self.assertEqual('jill@example.com', - mapped_properties['user']['email']) - self.assertEqual('ALL USERS', - mapped_properties['group_names'][0]['name']) + self.assertEqual( + 'jill@example.com', mapped_properties['user']['email'] + ) + self.assertEqual( + 'ALL USERS', mapped_properties['group_names'][0]['name'] + ) def test_rule_engine_groups_mapping_only_one_numerical_group(self): """Test mapping engine when groups is explicitly set. @@ -820,10 +846,10 @@ class MappingRuleEngineTests(unit.BaseTestCase): mapped_properties = rp.process(assertion) self.assertIsNotNone(mapped_properties) self.assertEqual('jsmith', mapped_properties['user']['name']) - self.assertEqual('jill@example.com', - mapped_properties['user']['email']) - self.assertEqual('1234', - mapped_properties['group_names'][0]['name']) + self.assertEqual( + 'jill@example.com', mapped_properties['user']['email'] + ) + self.assertEqual('1234', mapped_properties['group_names'][0]['name']) def test_rule_engine_group_ids_mapping_whitelist(self): """Test mapping engine when group_ids is explicitly set. @@ -838,8 +864,9 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertIsNotNone(mapped_properties) self.assertEqual('opilotte', mapped_properties['user']['name']) self.assertListEqual([], mapped_properties['group_names']) - self.assertCountEqual(['abc123', 'ghi789', 'klm012'], - mapped_properties['group_ids']) + self.assertCountEqual( + ['abc123', 'ghi789', 'klm012'], mapped_properties['group_ids'] + ) def test_rule_engine_group_ids_mapping_blacklist(self): """Test mapping engine when group_ids is explicitly set. 
@@ -854,8 +881,9 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertIsNotNone(mapped_properties) self.assertEqual('opilotte', mapped_properties['user']['name']) self.assertListEqual([], mapped_properties['group_names']) - self.assertCountEqual(['abc123', 'ghi789', 'klm012'], - mapped_properties['group_ids']) + self.assertCountEqual( + ['abc123', 'ghi789', 'klm012'], mapped_properties['group_ids'] + ) def test_rule_engine_group_ids_mapping_only_one_group(self): """Test mapping engine when group_ids is explicitly set. @@ -871,8 +899,9 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertIsNotNone(mapped_properties) self.assertEqual('opilotte', mapped_properties['user']['name']) self.assertListEqual([], mapped_properties['group_names']) - self.assertCountEqual(['210mlk', '321cba'], - mapped_properties['group_ids']) + self.assertCountEqual( + ['210mlk', '321cba'], mapped_properties['group_ids'] + ) def test_mapping_projects(self): mapping = mapping_fixtures.MAPPING_PROJECTS @@ -885,18 +914,12 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertEqual(expected_username, values['user']['name']) expected_projects = [ - { - "name": "Production", - "roles": [{"name": "observer"}] - }, - { - "name": "Staging", - "roles": [{"name": "member"}] - }, + {"name": "Production", "roles": [{"name": "observer"}]}, + {"name": "Staging", "roles": [{"name": "member"}]}, { "name": "Project for %s" % expected_username, - "roles": [{"name": "admin"}] - } + "roles": [{"name": "admin"}], + }, ] self.assertEqual(expected_projects, values['projects']) @@ -923,7 +946,10 @@ class MappingRuleEngineTests(unit.BaseTestCase): self.assertEqual(user_name, name) self.assertEqual(user_groups, group_list) - self.assertEqual([], group_ids, ) + self.assertEqual( + [], + group_ids, + ) class TestUnicodeAssertionData(unit.BaseTestCase): @@ -938,8 +964,7 @@ class TestUnicodeAssertionData(unit.BaseTestCase): def setUp(self): super(TestUnicodeAssertionData, self).setUp() 
self.config_fixture = self.useFixture(config_fixture.Config(CONF)) - self.config_fixture.config(group='federation', - assertion_prefix='PFX') + self.config_fixture.config(group='federation', assertion_prefix='PFX') def _pull_mapping_rules_from_the_database(self): # NOTE(dstanek): In a live system. The rules are dumped into JSON bytes @@ -948,8 +973,9 @@ class TestUnicodeAssertionData(unit.BaseTestCase): # Most of tests in this file incorrectly assume the mapping fixture # dictionary is the same as what it would look like coming out of the # database. The string, when coming out of the database, are all text. - return jsonutils.loads(jsonutils.dumps( - mapping_fixtures.MAPPING_UNICODE)) + return jsonutils.loads( + jsonutils.dumps(mapping_fixtures.MAPPING_UNICODE) + ) def _pull_assertion_from_the_request_headers(self): # NOTE(dstanek): In a live system the bytes for the assertion are @@ -959,8 +985,9 @@ class TestUnicodeAssertionData(unit.BaseTestCase): # Create a dummy application app = flask.Flask(__name__) with app.test_request_context( - path='/path', - environ_overrides=mapping_fixtures.UNICODE_NAME_ASSERTION): + path='/path', + environ_overrides=mapping_fixtures.UNICODE_NAME_ASSERTION, + ): data = mapping_utils.get_assertion_params_from_env() # NOTE(dstanek): keystone.auth.plugins.mapped return dict(data) @@ -987,13 +1014,9 @@ class TestMappingLocals(unit.BaseTestCase): { 'user': {'name': '{0}'}, }, - { - 'group': {'id': 'd34db33f'} - } + {'group': {'id': 'd34db33f'}}, ], - 'remote': [ - {'type': 'idp_username'} - ] + 'remote': [{'type': 'idp_username'}], } ] } @@ -1001,14 +1024,9 @@ class TestMappingLocals(unit.BaseTestCase): 'rules': [ { 'local': [ - { - 'user': {'name': '{0}'}, - 'group': {'id': 'd34db33f'} - } + {'user': {'name': '{0}'}, 'group': {'id': 'd34db33f'}} ], - 'remote': [ - {'type': 'idp_username'} - ] + 'remote': [{'type': 'idp_username'}], } ] } @@ -1017,15 +1035,13 @@ class TestMappingLocals(unit.BaseTestCase): { 'local': [ {'user': {'name': 
'test_{0}'}}, - {'user': {'name': '{0}'}} + {'user': {'name': '{0}'}}, ], - 'remote': [{'type': 'idp_username'}] + 'remote': [{'type': 'idp_username'}], } ] } - assertion = { - 'idp_username': 'a_user' - } + assertion = {'idp_username': 'a_user'} def process(self, rules): rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, rules) @@ -1033,13 +1049,10 @@ class TestMappingLocals(unit.BaseTestCase): def test_local_list_gets_squashed_into_a_single_dictionary(self): expected = { - 'user': { - 'name': 'a_user', - 'type': 'ephemeral' - }, + 'user': {'name': 'a_user', 'type': 'ephemeral'}, 'projects': [], 'group_ids': ['d34db33f'], - 'group_names': [] + 'group_names': [], } mapped_split = self.process(self.mapping_split['rules']) @@ -1050,13 +1063,10 @@ class TestMappingLocals(unit.BaseTestCase): def test_when_local_list_gets_squashed_first_dict_wins(self): expected = { - 'user': { - 'name': 'test_a_user', - 'type': 'ephemeral' - }, + 'user': {'name': 'test_a_user', 'type': 'ephemeral'}, 'projects': [], 'group_ids': [], - 'group_names': [] + 'group_names': [], } mapped = self.process(self.mapping_with_duplicate['rules']) diff --git a/keystone/tests/unit/core.py b/keystone/tests/unit/core.py index a1e86c7ac6..c6fea39e38 100644 --- a/keystone/tests/unit/core.py +++ b/keystone/tests/unit/core.py @@ -149,6 +149,7 @@ def skip_if_cache_disabled(*sections): section of the configuration is true. """ + def wrapper(f): @functools.wraps(f) def inner(*args, **kwargs): @@ -160,7 +161,9 @@ def skip_if_cache_disabled(*sections): if not getattr(conf_sec, 'caching', True): raise unittest.SkipTest('%s caching disabled.' % s) return f(*args, **kwargs) + return inner + return wrapper @@ -175,18 +178,22 @@ def skip_if_cache_is_enabled(*sections): if getattr(conf_sec, 'caching', True): raise unittest.SkipTest('%s caching enabled.' 
% s) return f(*args, **kwargs) + return inner + return wrapper def skip_if_no_multiple_domains_support(f): """Decorator to skip tests for identity drivers limited to one domain.""" + @functools.wraps(f) def wrapper(*args, **kwargs): test_obj = args[0] if not test_obj.identity_api.multiple_domains_supported: raise unittest.SkipTest('No multiple domains support') return f(*args, **kwargs) + return wrapper @@ -198,7 +205,8 @@ def new_region_ref(parent_region_id=None, **kwargs): ref = { 'id': uuid.uuid4().hex, 'description': uuid.uuid4().hex, - 'parent_region_id': parent_region_id} + 'parent_region_id': parent_region_id, + } ref.update(kwargs) return ref @@ -219,8 +227,9 @@ def new_service_ref(**kwargs): NEEDS_REGION_ID = object() -def new_endpoint_ref(service_id, interface='public', - region_id=NEEDS_REGION_ID, **kwargs): +def new_endpoint_ref( + service_id, interface='public', region_id=NEEDS_REGION_ID, **kwargs +): ref = { 'id': uuid.uuid4().hex, @@ -247,20 +256,22 @@ def new_endpoint_group_ref(filters, **kwargs): 'id': uuid.uuid4().hex, 'description': uuid.uuid4().hex, 'filters': filters, - 'name': uuid.uuid4().hex + 'name': uuid.uuid4().hex, } ref.update(kwargs) return ref -def new_endpoint_ref_with_region(service_id, region, interface='public', - **kwargs): +def new_endpoint_ref_with_region( + service_id, region, interface='public', **kwargs +): """Define an endpoint_ref having a pre-3.2 form. Contains the deprecated 'region' instead of 'region_id'. 
""" - ref = new_endpoint_ref(service_id, interface, region=region, - region_id='invalid', **kwargs) + ref = new_endpoint_ref( + service_id, interface, region=region, region_id='invalid', **kwargs + ) del ref['region_id'] return ref @@ -272,7 +283,7 @@ def new_domain_ref(**kwargs): 'description': uuid.uuid4().hex, 'enabled': True, 'tags': [], - 'options': {} + 'options': {}, } ref.update(kwargs) return ref @@ -287,7 +298,7 @@ def new_project_ref(domain_id=None, is_domain=False, **kwargs): 'domain_id': domain_id, 'is_domain': is_domain, 'tags': [], - 'options': {} + 'options': {}, } # NOTE(henry-nash): We don't include parent_id in the initial list above # since specifying it is optional depending on where the project sits in @@ -324,10 +335,7 @@ def new_federated_user_ref(idp_id=None, protocol_id=None, **kwargs): def new_mapping_ref(mapping_id=None, rules=None, **kwargs): - ref = { - 'id': mapping_id or uuid.uuid4().hex, - 'rules': rules or [] - } + ref = {'id': mapping_id or uuid.uuid4().hex, 'rules': rules or []} ref.update(kwargs) return ref @@ -336,7 +344,7 @@ def new_protocol_ref(protocol_id=None, idp_id=None, mapping_id=None, **kwargs): ref = { 'id': protocol_id or 'saml2', 'idp_id': idp_id or 'ORG_IDP', - 'mapping_id': mapping_id or uuid.uuid4().hex + 'mapping_id': mapping_id or uuid.uuid4().hex, } ref.update(kwargs) return ref @@ -358,7 +366,7 @@ def new_service_provider_ref(**kwargs): 'enabled': True, 'description': uuid.uuid4().hex, 'sp_url': 'https://' + uuid.uuid4().hex + '.com', - 'relay_state_prefix': CONF.saml.relay_state_prefix + 'relay_state_prefix': CONF.saml.relay_state_prefix, } ref.update(kwargs) return ref @@ -369,7 +377,7 @@ def new_group_ref(domain_id, **kwargs): 'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, 'description': uuid.uuid4().hex, - 'domain_id': domain_id + 'domain_id': domain_id, } ref.update(kwargs) return ref @@ -395,11 +403,13 @@ def new_cert_credential(user_id, project_id=None, blob=None, **kwargs): if blob is None: blob = 
{'access': uuid.uuid4().hex, 'secret': uuid.uuid4().hex} - credential = new_credential_ref(user_id=user_id, - project_id=project_id, - blob=json.dumps(blob), - type='cert', - **kwargs) + credential = new_credential_ref( + user_id=user_id, + project_id=project_id, + blob=json.dumps(blob), + type='cert', + **kwargs + ) return blob, credential @@ -408,18 +418,20 @@ def new_ec2_credential(user_id, project_id=None, blob=None, **kwargs): blob = { 'access': uuid.uuid4().hex, 'secret': uuid.uuid4().hex, - 'trust_id': None + 'trust_id': None, } if 'id' not in kwargs: access = blob['access'].encode('utf-8') kwargs['id'] = hashlib.sha256(access).hexdigest() - credential = new_credential_ref(user_id=user_id, - project_id=project_id, - blob=json.dumps(blob), - type='ec2', - **kwargs) + credential = new_credential_ref( + user_id=user_id, + project_id=project_id, + blob=json.dumps(blob), + type='ec2', + **kwargs + ) return blob, credential @@ -428,10 +440,9 @@ def new_totp_credential(user_id, project_id=None, blob=None): # NOTE(notmorgan): 20 bytes of data from secrets.token_bytes for # a totp secret. blob = base64.b32encode(secrets.token_bytes(20)).decode('utf-8') - credential = new_credential_ref(user_id=user_id, - project_id=project_id, - blob=blob, - type='totp') + credential = new_credential_ref( + user_id=user_id, project_id=project_id, blob=blob, type='totp' + ) return credential @@ -506,10 +517,9 @@ def create_pem_certificate(subject_dn, ca=None, ca_key=None): return cert.public_bytes(Encoding.PEM).decode('ascii') -def new_application_credential_ref(roles=None, - name=None, - expires=None, - secret=None): +def new_application_credential_ref( + roles=None, name=None, expires=None, secret=None +): ref = { 'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, @@ -553,7 +563,11 @@ def new_policy_ref(**kwargs): 'description': uuid.uuid4().hex, 'enabled': True, # Store serialized JSON data as the blob to mimic real world usage. 
- 'blob': json.dumps({'data': uuid.uuid4().hex, }), + 'blob': json.dumps( + { + 'data': uuid.uuid4().hex, + } + ), 'type': uuid.uuid4().hex, } @@ -563,22 +577,29 @@ def new_policy_ref(**kwargs): def new_domain_config_ref(**kwargs): ref = { - "identity": { - "driver": "ldap" - }, + "identity": {"driver": "ldap"}, "ldap": { "url": "ldap://myldap.com:389/", - "user_tree_dn": "ou=Users,dc=my_new_root,dc=org" - } + "user_tree_dn": "ou=Users,dc=my_new_root,dc=org", + }, } ref.update(kwargs) return ref -def new_trust_ref(trustor_user_id, trustee_user_id, project_id=None, - impersonation=None, expires=None, role_ids=None, - role_names=None, remaining_uses=None, - allow_redelegation=False, redelegation_count=None, **kwargs): +def new_trust_ref( + trustor_user_id, + trustee_user_id, + project_id=None, + impersonation=None, + expires=None, + role_ids=None, + role_names=None, + remaining_uses=None, + allow_redelegation=False, + redelegation_count=None, + **kwargs +): ref = { 'id': uuid.uuid4().hex, 'trustor_user_id': trustor_user_id, @@ -621,7 +642,7 @@ def new_registered_limit_ref(**kwargs): 'service_id': uuid.uuid4().hex, 'resource_name': uuid.uuid4().hex, 'default_limit': 10, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } ref.update(kwargs) @@ -633,7 +654,7 @@ def new_limit_ref(**kwargs): 'service_id': uuid.uuid4().hex, 'resource_name': uuid.uuid4().hex, 'resource_limit': 10, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } ref.update(kwargs) @@ -678,7 +699,9 @@ def _assert_expected_status(f): expected_status_code = kwargs.pop( 'expected_status_code', _default_expected_responses.get( - f.__name__.lower(), http.client.OK)) + f.__name__.lower(), http.client.OK + ), + ) response = f(*args, **kwargs) # Logic to verify the response object is sane. 
Expand as needed @@ -692,13 +715,17 @@ def _assert_expected_status(f): if response.status_code != expected_status_code: raise AssertionError( 'Expected HTTP Status does not match observed HTTP ' - 'Status: %(expected)s != %(observed)s (%(data)s)' % { + 'Status: %(expected)s != %(observed)s (%(data)s)' + % { 'expected': expected_status_code, 'observed': response.status_code, - 'data': response.data}) + 'data': response.data, + } + ) # return the original response object return response + return inner @@ -749,8 +776,9 @@ class BaseTestCase(testtools.TestCase): self.useFixture(fixtures.NestedTempfile()) self.useFixture(fixtures.TempHomeDir()) - self.useFixture(fixtures.MockPatchObject(sys, 'exit', - side_effect=UnexpectedExit)) + self.useFixture( + fixtures.MockPatchObject(sys, 'exit', side_effect=UnexpectedExit) + ) self.useFixture(log_fixture.get_logging_handle_error_fixture()) self.stdlog = self.useFixture(ksfixtures.StandardLogging()) self.useFixture(ksfixtures.WarningsFixture()) @@ -761,22 +789,26 @@ class BaseTestCase(testtools.TestCase): self.useFixture(oslo_ctx_fixture.ClearRequestContext()) orig_debug_level = ldap.get_option(ldap.OPT_DEBUG_LEVEL) - self.addCleanup(ldap.set_option, ldap.OPT_DEBUG_LEVEL, - orig_debug_level) + self.addCleanup( + ldap.set_option, ldap.OPT_DEBUG_LEVEL, orig_debug_level + ) orig_tls_cacertfile = ldap.get_option(ldap.OPT_X_TLS_CACERTFILE) if orig_tls_cacertfile is None: orig_tls_cacertfile = '' - self.addCleanup(ldap.set_option, ldap.OPT_X_TLS_CACERTFILE, - orig_tls_cacertfile) + self.addCleanup( + ldap.set_option, ldap.OPT_X_TLS_CACERTFILE, orig_tls_cacertfile + ) orig_tls_cacertdir = ldap.get_option(ldap.OPT_X_TLS_CACERTDIR) # Setting orig_tls_cacertdir to None is not allowed. 
if orig_tls_cacertdir is None: orig_tls_cacertdir = '' - self.addCleanup(ldap.set_option, ldap.OPT_X_TLS_CACERTDIR, - orig_tls_cacertdir) + self.addCleanup( + ldap.set_option, ldap.OPT_X_TLS_CACERTDIR, orig_tls_cacertdir + ) orig_tls_require_cert = ldap.get_option(ldap.OPT_X_TLS_REQUIRE_CERT) - self.addCleanup(ldap.set_option, ldap.OPT_X_TLS_REQUIRE_CERT, - orig_tls_require_cert) + self.addCleanup( + ldap.set_option, ldap.OPT_X_TLS_REQUIRE_CERT, orig_tls_require_cert + ) self.addCleanup(ks_ldap.PooledLDAPHandler.connection_pools.clear) def cleanup_instance(self, *names): @@ -785,6 +817,7 @@ class BaseTestCase(testtools.TestCase): :returns: a callable that uses a closure to delete instance attributes """ + def cleanup(): for name in names: # TODO(dstanek): remove this 'if' statement once @@ -792,6 +825,7 @@ class BaseTestCase(testtools.TestCase): # per test if hasattr(self, name): delattr(self, name) + return cleanup def skip_if_env_not_set(self, env_var): @@ -801,8 +835,9 @@ class BaseTestCase(testtools.TestCase): def skip_test_overrides(self, *args, **kwargs): if self._check_for_method_in_parents(self._testMethodName): return super(BaseTestCase, self).skipTest(*args, **kwargs) - raise Exception('%r is not a previously defined test method' - % self._testMethodName) + raise Exception( + '%r is not a previously defined test method' % self._testMethodName + ) def _check_for_method_in_parents(self, name): # skip first to get to parents @@ -822,8 +857,9 @@ class BaseTestCase(testtools.TestCase): content = ( 'TEST PROGRAMMING ERROR - Reached a 404 from an unrouted (`%s`' ') path. Be sure the test is requesting the right resource ' - 'and that all blueprints are registered with the flask app.' % - flask.request.url) + 'and that all blueprints are registered with the flask app.' 
+ % flask.request.url + ) return content, 418 app.register_error_handler(404, page_not_found_teapot) @@ -858,7 +894,8 @@ class TestCase(BaseTestCase): if not environ.get(context.REQUEST_CONTEXT_ENV): environ[context.REQUEST_CONTEXT_ENV] = context.RequestContext( is_admin=is_admin, - authenticated=kwargs.pop('authenticated', True)) + authenticated=kwargs.pop('authenticated', True), + ) # Create a dummy flask app to work with app = flask.Flask(__name__) @@ -882,13 +919,16 @@ class TestCase(BaseTestCase): group='cache', backend='dogpile.cache.memory', enabled=True, - proxies=['oslo_cache.testing.CacheIsolatingProxy']) + proxies=['oslo_cache.testing.CacheIsolatingProxy'], + ) self.config_fixture.config( group='catalog', driver='sql', - template_file=dirs.tests('default_catalog.templates')) + template_file=dirs.tests('default_catalog.templates'), + ) self.config_fixture.config( - group='saml', certfile=signing_certfile, keyfile=signing_keyfile) + group='saml', certfile=signing_certfile, keyfile=signing_keyfile + ) self.config_fixture.config( default_log_levels=[ 'amqp=WARN', @@ -904,7 +944,8 @@ class TestCase(BaseTestCase): 'stevedore.extension=INFO', 'keystone.notifications=INFO', 'keystone.identity.backends.ldap.common=INFO', - ]) + ] + ) # NOTE(notmorgan): Set password rounds low here to ensure speedy # tests. This is explicitly set because the tests here are not testing # the integrity of the password hashing, just that the correct form @@ -916,7 +957,7 @@ class TestCase(BaseTestCase): ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) @@ -924,7 +965,7 @@ class TestCase(BaseTestCase): ksfixtures.KeyRepository( self.config_fixture, 'fernet_receipts', - CONF.fernet_receipts.max_active_keys + CONF.fernet_receipts.max_active_keys, ) ) @@ -944,9 +985,14 @@ class TestCase(BaseTestCase): # cleanup. 
def mocked_register_auth_plugin_opt(conf, opt): self.config_fixture.register_opt(opt, group='auth') - self.useFixture(fixtures.MockPatchObject( - keystone.conf.auth, '_register_auth_plugin_opt', - new=mocked_register_auth_plugin_opt)) + + self.useFixture( + fixtures.MockPatchObject( + keystone.conf.auth, + '_register_auth_plugin_opt', + new=mocked_register_auth_plugin_opt, + ) + ) self.config_overrides() # explicitly load auth configuration @@ -1004,12 +1050,15 @@ class TestCase(BaseTestCase): # TODO(termie): doing something from json, probably based on Django's # loaddata will be much preferred. - if (hasattr(self, 'identity_api') and - hasattr(self, 'assignment_api') and - hasattr(self, 'resource_api')): + if ( + hasattr(self, 'identity_api') + and hasattr(self, 'assignment_api') + and hasattr(self, 'resource_api') + ): try: PROVIDERS.resource_api.create_domain( - resource_base.NULL_DOMAIN_ID, fixtures.ROOT_DOMAIN) + resource_base.NULL_DOMAIN_ID, fixtures.ROOT_DOMAIN + ) except exception.Conflict: # the root domain already exists, skip now. pass @@ -1022,7 +1071,8 @@ class TestCase(BaseTestCase): for project in fixtures.PROJECTS: project_attr_name = 'project_%s' % project['name'].lower() rv = PROVIDERS.resource_api.create_project( - project['id'], project) + project['id'], project + ) setattr(self, project_attr_name, rv) fixtures_to_cleanup.append(project_attr_name) @@ -1046,7 +1096,8 @@ class TestCase(BaseTestCase): # fixtures.ROLES[2] is the _member_ role. 
for project_id in projects: PROVIDERS.assignment_api.add_role_to_user_and_project( - user_copy['id'], project_id, fixtures.ROLES[2]['id']) + user_copy['id'], project_id, fixtures.ROLES[2]['id'] + ) # Use the ID from the fixture as the attribute name, so # that our tests can easily reference each user dict, while @@ -1061,7 +1112,8 @@ class TestCase(BaseTestCase): project_id = role_assignment['project_id'] user_id = getattr(self, 'user_%s' % user)['id'] PROVIDERS.assignment_api.add_role_to_user_and_project( - user_id, project_id, role_id) + user_id, project_id, role_id + ) self.addCleanup(self.cleanup_instance(*fixtures_to_cleanup)) @@ -1084,10 +1136,7 @@ class TestCase(BaseTestCase): # is working to eliminate microseconds from it's datetimes used. expected = timeutils.parse_isotime(expected).replace(microsecond=0) value = timeutils.parse_isotime(value).replace(microsecond=0) - self.assertEqual( - expected, - value, - "%s != %s" % (expected, value)) + self.assertEqual(expected, value, "%s != %s" % (expected, value)) def assertNotEmpty(self, iterable): self.assertGreater(len(iterable), 0) @@ -1103,8 +1152,11 @@ class TestCase(BaseTestCase): # NOTE(notmorgan): An empty option list is the same as no options being # specified in the user_ref. This removes options if it is empty in # observed if options is not specified in the expected value. 
- if ('options' in observed and not observed['options'] and - 'options' not in expected): + if ( + 'options' in observed + and not observed['options'] + and 'options' not in expected + ): observed = observed.copy() del observed['options'] diff --git a/keystone/tests/unit/credential/test_backend_sql.py b/keystone/tests/unit/credential/test_backend_sql.py index 791c10022b..74cf312f5f 100644 --- a/keystone/tests/unit/credential/test_backend_sql.py +++ b/keystone/tests/unit/credential/test_backend_sql.py @@ -49,16 +49,17 @@ class SqlCredential(SqlTests): def _create_credential_with_user_id(self, user_id=None): if not user_id: user_id = uuid.uuid4().hex - credential = unit.new_credential_ref(user_id=user_id, - extra=uuid.uuid4().hex, - type=uuid.uuid4().hex) + credential = unit.new_credential_ref( + user_id=user_id, extra=uuid.uuid4().hex, type=uuid.uuid4().hex + ) PROVIDERS.credential_api.create_credential( credential['id'], credential ) return credential - def _validate_credential_list(self, retrieved_credentials, - expected_credentials): + def _validate_credential_list( + self, retrieved_credentials, expected_credentials + ): self.assertEqual(len(expected_credentials), len(retrieved_credentials)) retrieved_ids = [c['id'] for c in retrieved_credentials] for cred in expected_credentials: @@ -70,7 +71,7 @@ class SqlCredential(SqlTests): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_provider.MAX_ACTIVE_KEYS + credential_provider.MAX_ACTIVE_KEYS, ) ) self.credentials = [] @@ -99,7 +100,7 @@ class SqlCredential(SqlTests): 'id': uuid.uuid4().hex, 'type': uuid.uuid4().hex, 'hash': uuid.uuid4().hex, - 'encrypted_blob': b'randomdata' + 'encrypted_blob': b'randomdata', } ref = credential_sql.CredentialModel.from_dict(cred_dict) # Make sure CredentialModel is handing over a text string @@ -110,6 +111,8 @@ class SqlCredential(SqlTests): config_fixture_ = self.user = self.useFixture(config_fixture.Config()) config_fixture_.config(group='credential', 
user_limit=4) self._create_credential_with_user_id(self.user_foo['id']) - self.assertRaises(exception.CredentialLimitExceeded, - self._create_credential_with_user_id, - self.user_foo['id']) + self.assertRaises( + exception.CredentialLimitExceeded, + self._create_credential_with_user_id, + self.user_foo['id'], + ) diff --git a/keystone/tests/unit/credential/test_fernet_provider.py b/keystone/tests/unit/credential/test_fernet_provider.py index c5ea7d41e8..c519f61253 100644 --- a/keystone/tests/unit/credential/test_fernet_provider.py +++ b/keystone/tests/unit/credential/test_fernet_provider.py @@ -32,7 +32,7 @@ class TestFernetCredentialProvider(unit.TestCase): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_fernet.MAX_ACTIVE_KEYS + credential_fernet.MAX_ACTIVE_KEYS, ) ) @@ -59,7 +59,7 @@ class TestFernetCredentialProviderWithNullKey(unit.TestCase): # default. self.config_fixture.config( group='credential', - key_repository=self.useFixture(fixtures.TempDir()).path + key_repository=self.useFixture(fixtures.TempDir()).path, ) def test_encryption_with_null_key(self): diff --git a/keystone/tests/unit/default_fixtures.py b/keystone/tests/unit/default_fixtures.py index 549504eca6..44208ef6d7 100644 --- a/keystone/tests/unit/default_fixtures.py +++ b/keystone/tests/unit/default_fixtures.py @@ -35,8 +35,9 @@ PROJECTS = [ 'parent_id': DEFAULT_DOMAIN_ID, 'is_domain': False, 'tags': [], - 'options': {} - }, { + 'options': {}, + }, + { 'id': BAZ_PROJECT_ID, 'name': 'BAZ', 'domain_id': DEFAULT_DOMAIN_ID, @@ -45,8 +46,9 @@ PROJECTS = [ 'parent_id': DEFAULT_DOMAIN_ID, 'is_domain': False, 'tags': [], - 'options': {} - }, { + 'options': {}, + }, + { 'id': MTU_PROJECT_ID, 'name': 'MTU', 'description': 'description', @@ -55,8 +57,9 @@ PROJECTS = [ 'parent_id': DEFAULT_DOMAIN_ID, 'is_domain': False, 'tags': [], - 'options': {} - }, { + 'options': {}, + }, + { 'id': SERVICE_PROJECT_ID, 'name': 'service', 'description': 'description', @@ -65,8 +68,8 @@ PROJECTS = 
[ 'parent_id': DEFAULT_DOMAIN_ID, 'is_domain': False, 'tags': [], - 'options': {} - } + 'options': {}, + }, ] # NOTE(ja): a role of keystone_admin is done in setUp @@ -90,7 +93,8 @@ USERS = [ 'enabled': True, 'email': 'foo@bar.com', 'options': {}, - }, { + }, + { 'id': uuid.uuid4().hex, 'name': 'two', 'domain_id': DEFAULT_DOMAIN_ID, @@ -100,7 +104,8 @@ USERS = [ 'projects': [BAZ_PROJECT_ID], 'email': 'two@three.com', 'options': {}, - }, { + }, + { 'id': uuid.uuid4().hex, 'name': 'badguy', 'domain_id': DEFAULT_DOMAIN_ID, @@ -110,7 +115,8 @@ USERS = [ 'projects': [BAZ_PROJECT_ID], 'email': 'bad@guy.com', 'options': {}, - }, { + }, + { 'id': uuid.uuid4().hex, 'name': 'sna', 'domain_id': DEFAULT_DOMAIN_ID, @@ -119,7 +125,7 @@ USERS = [ 'projects': [BAR_PROJECT_ID], 'email': 'sna@snl.coom', 'options': {}, - } + }, ] ROLES = [ @@ -127,31 +133,37 @@ ROLES = [ 'id': ADMIN_ROLE_ID, 'name': 'admin', 'domain_id': None, - }, { + }, + { 'id': MEMBER_ROLE_ID, 'name': 'member', 'domain_id': None, - }, { + }, + { 'id': '9fe2ff9ee4384b1894a90878d3e92bab', 'name': '_member_', 'domain_id': None, - }, { + }, + { 'id': OTHER_ROLE_ID, 'name': 'other', 'domain_id': None, - }, { + }, + { 'id': uuid.uuid4().hex, 'name': 'browser', 'domain_id': None, - }, { + }, + { 'id': uuid.uuid4().hex, 'name': 'writer', 'domain_id': None, - }, { + }, + { 'id': uuid.uuid4().hex, 'name': 'service', 'domain_id': None, - } + }, ] # NOTE(morganfainberg): Admin assignment for replacing admin_token_auth @@ -159,28 +171,38 @@ ROLE_ASSIGNMENTS = [ { 'user': 'req_admin', 'project_id': SERVICE_PROJECT_ID, - 'role_id': ADMIN_ROLE_ID + 'role_id': ADMIN_ROLE_ID, }, ] # TODO(wxy): We should add the root domain ``<>`` as well # when the FKs is enabled for the test. Merge ROOT_DOMAIN into DOMAINS once all # test enable FKs. 
-ROOT_DOMAIN = {'enabled': True, - 'id': '<>', - 'name': '<>'} +ROOT_DOMAIN = { + 'enabled': True, + 'id': '<>', + 'name': '<>', +} -DOMAINS = [{'description': - (u'The default domain'), - 'enabled': True, - 'id': DEFAULT_DOMAIN_ID, - 'name': u'Default'}] +DOMAINS = [ + { + 'description': (u'The default domain'), + 'enabled': True, + 'id': DEFAULT_DOMAIN_ID, + 'name': u'Default', + } +] -SERVICES = [{'id': uuid.uuid4().hex, - 'type': 'type_one', - 'enabled': True, - 'extra': {'description': 'This is a service for test.', - 'name': 'service_one'} - }] +SERVICES = [ + { + 'id': uuid.uuid4().hex, + 'type': 'type_one', + 'enabled': True, + 'extra': { + 'description': 'This is a service for test.', + 'name': 'service_one', + }, + } +] REGIONS = [{'id': 'region_one'}, {'id': 'region_two'}] diff --git a/keystone/tests/unit/endpoint_policy/backends/test_base.py b/keystone/tests/unit/endpoint_policy/backends/test_base.py index 7c63fb3883..931ab28205 100644 --- a/keystone/tests/unit/endpoint_policy/backends/test_base.py +++ b/keystone/tests/unit/endpoint_policy/backends/test_base.py @@ -23,10 +23,12 @@ class DriverTestCase(object): raise exception.NotImplemented() def create_association(self, **kwargs): - association = {'policy_id': uuid.uuid4().hex, - 'endpoint_id': None, - 'service_id': None, - 'region_id': None} + association = { + 'policy_id': uuid.uuid4().hex, + 'endpoint_id': None, + 'service_id': None, + 'region_id': None, + } association.update(kwargs) self.driver.create_policy_association(**association) return association @@ -35,8 +37,9 @@ class DriverTestCase(object): association = self.create_association(endpoint_id=uuid.uuid4().hex) self.driver.check_policy_association(**association) - association = self.create_association(service_id=uuid.uuid4().hex, - region_id=uuid.uuid4().hex) + association = self.create_association( + service_id=uuid.uuid4().hex, region_id=uuid.uuid4().hex + ) self.driver.check_policy_association(**association) association = 
self.create_association(service_id=uuid.uuid4().hex) @@ -46,37 +49,45 @@ class DriverTestCase(object): # Creating a policy association to a target that already has a policy # associated to it will cause the original policy to be overridden original_association = self.create_association( - service_id=uuid.uuid4().hex) + service_id=uuid.uuid4().hex + ) override_association = original_association.copy() override_association['policy_id'] = uuid.uuid4().hex self.driver.create_policy_association(**override_association) self.driver.check_policy_association(**override_association) - self.assertRaises(exception.PolicyAssociationNotFound, - self.driver.check_policy_association, - **original_association) + self.assertRaises( + exception.PolicyAssociationNotFound, + self.driver.check_policy_association, + **original_association + ) def test_check_policy_association(self): - association = self.create_association(service_id=uuid.uuid4().hex, - region_id=uuid.uuid4().hex) + association = self.create_association( + service_id=uuid.uuid4().hex, region_id=uuid.uuid4().hex + ) self.driver.check_policy_association(**association) # An association is uniquely identified by its target. 
Omitting any # attribute (region_id in this case) will result in a different check association.pop('region_id') - self.assertRaises(exception.PolicyAssociationNotFound, - self.driver.check_policy_association, - **association) + self.assertRaises( + exception.PolicyAssociationNotFound, + self.driver.check_policy_association, + **association + ) def test_delete_policy_association(self): association = self.create_association(endpoint_id=uuid.uuid4().hex) self.driver.delete_policy_association(**association) - self.assertRaises(exception.PolicyAssociationNotFound, - self.driver.check_policy_association, - **association) + self.assertRaises( + exception.PolicyAssociationNotFound, + self.driver.check_policy_association, + **association + ) def test_get_policy_association(self): association = self.create_association(service_id=uuid.uuid4().hex) @@ -89,62 +100,80 @@ class DriverTestCase(object): def test_list_associations_for_policy(self): policy_id = uuid.uuid4().hex - first = self.create_association(endpoint_id=uuid.uuid4().hex, - policy_id=policy_id) - second = self.create_association(service_id=uuid.uuid4().hex, - policy_id=policy_id) + first = self.create_association( + endpoint_id=uuid.uuid4().hex, policy_id=policy_id + ) + second = self.create_association( + service_id=uuid.uuid4().hex, policy_id=policy_id + ) associations = self.driver.list_associations_for_policy(policy_id) self.assertCountEqual([first, second], associations) def test_delete_association_by_endpoint(self): endpoint_id = uuid.uuid4().hex - associations = [self.create_association(endpoint_id=endpoint_id), - self.create_association(endpoint_id=endpoint_id)] + associations = [ + self.create_association(endpoint_id=endpoint_id), + self.create_association(endpoint_id=endpoint_id), + ] self.driver.delete_association_by_endpoint(endpoint_id) for association in associations: - self.assertRaises(exception.PolicyAssociationNotFound, - self.driver.check_policy_association, - **association) + self.assertRaises( 
+ exception.PolicyAssociationNotFound, + self.driver.check_policy_association, + **association + ) def test_delete_association_by_service(self): service_id = uuid.uuid4().hex - associations = [self.create_association(service_id=service_id), - self.create_association(service_id=service_id)] + associations = [ + self.create_association(service_id=service_id), + self.create_association(service_id=service_id), + ] self.driver.delete_association_by_service(service_id) for association in associations: - self.assertRaises(exception.PolicyAssociationNotFound, - self.driver.check_policy_association, - **association) + self.assertRaises( + exception.PolicyAssociationNotFound, + self.driver.check_policy_association, + **association + ) def test_delete_association_by_region(self): region_id = uuid.uuid4().hex - first = self.create_association(service_id=uuid.uuid4().hex, - region_id=region_id) - second = self.create_association(service_id=uuid.uuid4().hex, - region_id=region_id) + first = self.create_association( + service_id=uuid.uuid4().hex, region_id=region_id + ) + second = self.create_association( + service_id=uuid.uuid4().hex, region_id=region_id + ) self.driver.delete_association_by_region(region_id) for association in [first, second]: - self.assertRaises(exception.PolicyAssociationNotFound, - self.driver.check_policy_association, - **association) + self.assertRaises( + exception.PolicyAssociationNotFound, + self.driver.check_policy_association, + **association + ) def test_delete_association_by_policy(self): policy_id = uuid.uuid4().hex - first = self.create_association(endpoint_id=uuid.uuid4().hex, - policy_id=policy_id) - second = self.create_association(service_id=uuid.uuid4().hex, - policy_id=policy_id) + first = self.create_association( + endpoint_id=uuid.uuid4().hex, policy_id=policy_id + ) + second = self.create_association( + service_id=uuid.uuid4().hex, policy_id=policy_id + ) self.driver.delete_association_by_policy(policy_id) for association in [first, 
second]: - self.assertRaises(exception.PolicyAssociationNotFound, - self.driver.check_policy_association, - **association) + self.assertRaises( + exception.PolicyAssociationNotFound, + self.driver.check_policy_association, + **association + ) diff --git a/keystone/tests/unit/endpoint_policy/backends/test_sql.py b/keystone/tests/unit/endpoint_policy/backends/test_sql.py index d236ad9133..5cd9f3ad4d 100644 --- a/keystone/tests/unit/endpoint_policy/backends/test_sql.py +++ b/keystone/tests/unit/endpoint_policy/backends/test_sql.py @@ -22,11 +22,13 @@ class SQLModelTestCase(core_sql.BaseBackendSqlModels): """Test cases to validate the table structure.""" def test_policy_association_model(self): - cols = (('id', sql.String, 64), - ('policy_id', sql.String, 64), - ('endpoint_id', sql.String, 64), - ('service_id', sql.String, 64), - ('region_id', sql.String, 64)) + cols = ( + ('id', sql.String, 64), + ('policy_id', sql.String, 64), + ('endpoint_id', sql.String, 64), + ('service_id', sql.String, 64), + ('region_id', sql.String, 64), + ) self.assertExpectedSchema('policy_association', cols) diff --git a/keystone/tests/unit/external/test_timeutils.py b/keystone/tests/unit/external/test_timeutils.py index 662e0ad49c..fca5ca5b87 100644 --- a/keystone/tests/unit/external/test_timeutils.py +++ b/keystone/tests/unit/external/test_timeutils.py @@ -25,7 +25,9 @@ class TestTimeUtils(tests.BaseTestCase): def test_parsing_invalid_date_strings_raises_a_ValueError(self): example_date_str = '' simple_format = '%Y' - self.assertRaises(ValueError, - datetime.datetime.strptime, - example_date_str, - simple_format) + self.assertRaises( + ValueError, + datetime.datetime.strptime, + example_date_str, + simple_format, + ) diff --git a/keystone/tests/unit/fakeldap.py b/keystone/tests/unit/fakeldap.py index 5119305a72..c99ce6b894 100644 --- a/keystone/tests/unit/fakeldap.py +++ b/keystone/tests/unit/fakeldap.py @@ -101,8 +101,9 @@ def _match_query(query, attrs, attrs_checked): matchfn = any # cut 
off the & or | groups = _paren_groups(inner[1:]) - return matchfn(_match_query(group, attrs, attrs_checked) - for group in groups) + return matchfn( + _match_query(group, attrs, attrs_checked) for group in groups + ) if inner.startswith('!'): # cut off the ! and the nested parentheses return not _match_query(query[2:-1], attrs, attrs_checked) @@ -125,12 +126,13 @@ def _paren_groups(source): if source[pos] == ')': count -= 1 if count == 0: - result.append(source[start:pos + 1]) + result.append(source[start : pos + 1]) return result def _match(key, value, attrs): """Match a given key and value against an attribute list.""" + def match_with_wildcards(norm_val, val_list): # Case insensitive checking with wildcards if norm_val.startswith('*'): @@ -142,13 +144,12 @@ def _match(key, value, attrs): else: # Is the string at the end of the target? for x in val_list: - if (norm_val[1:] == - x[len(x) - len(norm_val) + 1:]): + if norm_val[1:] == x[len(x) - len(norm_val) + 1 :]: return True elif norm_val.endswith('*'): # Is the string at the start of the target? for x in val_list: - if norm_val[:-1] == x[:len(norm_val) - 1]: + if norm_val[:-1] == x[: len(norm_val) - 1]: return True else: # Is the string an exact match? @@ -171,7 +172,8 @@ def _match(key, value, attrs): if key != 'objectclass': check_value = _internal_attr(key, value)[0].lower() norm_values = list( - _internal_attr(key, x)[0].lower() for x in attrs[key]) + _internal_attr(key, x)[0].lower() for x in attrs[key] + ) return match_with_wildcards(check_value, norm_values) # It is an objectclass check, so check subclasses values = _subs(value) @@ -189,9 +191,13 @@ def _subs(value): so subclasses need to be defined manually in the dictionary below. 
""" - subs = {'groupOfNames': ['keystoneProject', - 'keystoneRole', - 'keystoneProjectRole']} + subs = { + 'groupOfNames': [ + 'keystoneProject', + 'keystoneRole', + 'keystoneProjectRole', + ] + } if value in subs: return [value] + subs[value] return [value] @@ -238,13 +244,25 @@ class FakeLdap(common.LDAPHandler): super(FakeLdap, self).__init__(conn=conn) self._ldap_options = {ldap.OPT_DEREF: ldap.DEREF_NEVER} - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert='demand', chase_referrals=None, debug_level=None, - use_pool=None, pool_size=None, pool_retry_max=None, - pool_retry_delay=None, pool_conn_timeout=None, - pool_conn_lifetime=None, - conn_timeout=None): + def connect( + self, + url, + page_size=0, + alias_dereferencing=None, + use_tls=False, + tls_cacertfile=None, + tls_cacertdir=None, + tls_req_cert='demand', + chase_referrals=None, + debug_level=None, + use_pool=None, + pool_size=None, + pool_retry_max=None, + pool_retry_delay=None, + pool_conn_timeout=None, + pool_conn_lifetime=None, + conn_timeout=None, + ): if url.startswith('fake://memory'): if url not in FakeShelves: FakeShelves[url] = FakeShelve() @@ -265,8 +283,9 @@ class FakeLdap(common.LDAPHandler): if tls_req_cert in list(common.LDAP_TLS_CERTS.values()): ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert) else: - raise ValueError("invalid TLS_REQUIRE_CERT tls_req_cert=%s", - tls_req_cert) + raise ValueError( + "invalid TLS_REQUIRE_CERT tls_req_cert=%s", tls_req_cert + ) if alias_dereferencing is not None: self.set_option(ldap.OPT_DEREF, alias_dereferencing) @@ -289,13 +308,14 @@ class FakeLdap(common.LDAPHandler): def key(self, dn): return '%s%s' % (self.__prefix, dn) - def simple_bind_s(self, who='', cred='', - serverctrls=None, clientctrls=None): + def simple_bind_s( + self, who='', cred='', serverctrls=None, clientctrls=None + ): """Provide for compatibility but this method is ignored.""" if 
server_fail: raise ldap.SERVER_DOWN whos = ['cn=Admin', CONF.ldap.user] - if (who in whos and cred in ['password', CONF.ldap.password]): + if who in whos and cred in ['password', CONF.ldap.password]: self.connected = True self.who = who self.cred = cred @@ -337,8 +357,9 @@ class FakeLdap(common.LDAPHandler): # The LDAP API raises a TypeError if attr name is None. for k, dummy_v in modlist: if k is None: - raise TypeError('must be string, not None. modlist=%s' % - modlist) + raise TypeError( + 'must be string, not None. modlist=%s' % modlist + ) if k == id_attr: for val in dummy_v: @@ -346,12 +367,16 @@ class FakeLdap(common.LDAPHandler): id_attr_in_modlist = True if not id_attr_in_modlist: - LOG.debug('id_attribute=%(attr)s missing, attributes=%(attrs)s', - {'attr': id_attr, 'attrs': modlist}) + LOG.debug( + 'id_attribute=%(attr)s missing, attributes=%(attrs)s', + {'attr': id_attr, 'attrs': modlist}, + ) raise ldap.NAMING_VIOLATION key = self.key(dn) - LOG.debug('add item: dn=%(dn)s, attrs=%(attrs)s', { - 'dn': dn, 'attrs': modlist}) + LOG.debug( + 'add item: dn=%(dn)s, attrs=%(attrs)s', + {'dn': dn, 'attrs': modlist}, + ) if key in self.db: LOG.debug('add item failed: dn=%s is already in store.', dn) raise ldap.ALREADY_EXISTS(dn) @@ -364,10 +389,13 @@ class FakeLdap(common.LDAPHandler): return self.delete_ext_s(dn, serverctrls=[]) def _getChildren(self, dn): - return [k for k, v in self.db.items() - if re.match('%s.*,%s' % ( - re.escape(self.__prefix), - re.escape(dn)), k)] + return [ + k + for k, v in self.db.items() + if re.match( + '%s.*,%s' % (re.escape(self.__prefix), re.escape(dn)), k + ) + ] def delete_ext_s(self, dn, serverctrls, clientctrls=None): """Remove the ldap object at specified dn.""" @@ -394,8 +422,10 @@ class FakeLdap(common.LDAPHandler): raise ldap.SERVER_DOWN key = self.key(dn) - LOG.debug('modify item: dn=%(dn)s attrs=%(attrs)s', { - 'dn': dn, 'attrs': modlist}) + LOG.debug( + 'modify item: dn=%(dn)s attrs=%(attrs)s', + {'dn': dn, 'attrs': 
modlist}, + ) try: entry = self.db[key] except KeyError: @@ -415,8 +445,11 @@ class FakeLdap(common.LDAPHandler): elif cmd == ldap.MOD_DELETE: if v is None: if not values: - LOG.debug('modify item failed: ' - 'item has no attribute "%s" to delete', k) + LOG.debug( + 'modify item failed: ' + 'item has no attribute "%s" to delete', + k, + ) raise ldap.NO_SUCH_ATTRIBUTE values[:] = [] else: @@ -424,20 +457,29 @@ class FakeLdap(common.LDAPHandler): try: values.remove(val) except ValueError: - LOG.debug('modify item failed: ' - 'item has no attribute "%(k)s" with ' - 'value "%(v)s" to delete', { - 'k': k, 'v': val}) + LOG.debug( + 'modify item failed: ' + 'item has no attribute "%(k)s" with ' + 'value "%(v)s" to delete', + {'k': k, 'v': val}, + ) raise ldap.NO_SUCH_ATTRIBUTE else: LOG.debug('modify item failed: unknown command %s', cmd) - raise NotImplementedError('modify_s action %s not' - ' implemented' % cmd) + raise NotImplementedError( + 'modify_s action %s not' ' implemented' % cmd + ) self.db[key] = entry self.db.sync() - def search_s(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): + def search_s( + self, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + ): """Search for all matching objects under base using the query. 
Args: @@ -451,8 +493,9 @@ class FakeLdap(common.LDAPHandler): raise ldap.SERVER_DOWN if (not filterstr) and (scope != ldap.SCOPE_BASE): - raise AssertionError('Search without filter on onelevel or ' - 'subtree scope') + raise AssertionError( + 'Search without filter on onelevel or ' 'subtree scope' + ) if scope == ldap.SCOPE_BASE: try: @@ -475,11 +518,13 @@ class FakeLdap(common.LDAPHandler): LOG.debug('search fail: dn not found for SCOPE_SUBTREE') raise ldap.NO_SUCH_OBJECT results = [(base, item_dict)] - extraresults = [(k[len(self.__prefix):], v) - for k, v in self.db.items() - if re.match('%s.*,%s' % - (re.escape(self.__prefix), - re.escape(base)), k)] + extraresults = [ + (k[len(self.__prefix) :], v) + for k, v in self.db.items() + if re.match( + '%s.*,%s' % (re.escape(self.__prefix), re.escape(base)), k + ) + ] results.extend(extraresults) elif scope == ldap.SCOPE_ONELEVEL: @@ -490,7 +535,7 @@ class FakeLdap(common.LDAPHandler): for k, v in self.db.items(): if not k.startswith(self.__prefix): continue - k_dn_str = k[len(self.__prefix):] + k_dn_str = k[len(self.__prefix) :] k_dn = ldap.dn.str2dn(k_dn_str) if len(k_dn) != base_len + 1: continue @@ -511,16 +556,21 @@ class FakeLdap(common.LDAPHandler): match_attrs = attrs.copy() match_attrs[id_attr] = [id_val] attrs_checked = set() - if not filterstr or _match_query(filterstr, - match_attrs, - attrs_checked): - if (filterstr and - (scope != ldap.SCOPE_BASE) and - ('objectclass' not in attrs_checked)): + if not filterstr or _match_query( + filterstr, match_attrs, attrs_checked + ): + if ( + filterstr + and (scope != ldap.SCOPE_BASE) + and ('objectclass' not in attrs_checked) + ): raise AssertionError('No objectClass in search filter') # filter the attributes by attrlist - attrs = {k: v for k, v in attrs.items() - if not attrlist or k in attrlist} + attrs = { + k: v + for k, v in attrs.items() + if not attrlist or k in attrlist + } objects.append((dn, attrs)) return objects @@ -532,10 +582,18 @@ class 
FakeLdap(common.LDAPHandler): value = self._ldap_options.get(option) return value - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): + def search_ext( + self, + base, + scope, + filterstr='(objectClass=*)', + attrlist=None, + attrsonly=0, + serverctrls=None, + clientctrls=None, + timeout=-1, + sizelimit=0, + ): if clientctrls is not None or timeout != -1 or sizelimit != 0: raise exception.NotImplemented() @@ -548,12 +606,19 @@ class FakeLdap(common.LDAPHandler): # storing the request in a variable with random integer key and # performing the real lookup in result3() msgid = random.randint(0, 1000) - PendingRequests[msgid] = (base, scope, filterstr, attrlist, attrsonly, - serverctrls) + PendingRequests[msgid] = ( + base, + scope, + filterstr, + attrlist, + attrsonly, + serverctrls, + ) return msgid - def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None, - resp_ctrl_classes=None): + def result3( + self, msgid=ldap.RES_ANY, all=1, timeout=None, resp_ctrl_classes=None + ): """Execute async request. Only msgid param is supported. 
Request info is fetched from global @@ -573,7 +638,7 @@ class FakeLdap(common.LDAPHandler): ctrl = serverctrls[0] if ctrl.size: - rdata = results[:ctrl.size] + rdata = results[: ctrl.size] else: rdata = results @@ -603,8 +668,9 @@ class FakeLdapPool(FakeLdap): def get_lifetime(self): return self._connection_time - def simple_bind_s(self, who=None, cred=None, - serverctrls=None, clientctrls=None): + def simple_bind_s( + self, who=None, cred=None, serverctrls=None, clientctrls=None + ): if self.url.startswith('fakepool://memory'): if self.url not in FakeShelves: FakeShelves[self.url] = FakeShelve() @@ -617,9 +683,12 @@ class FakeLdapPool(FakeLdap): if not cred: cred = 'password' - super(FakeLdapPool, self).simple_bind_s(who=who, cred=cred, - serverctrls=serverctrls, - clientctrls=clientctrls) + super(FakeLdapPool, self).simple_bind_s( + who=who, + cred=cred, + serverctrls=serverctrls, + clientctrls=clientctrls, + ) def unbind_ext_s(self): """Added to extend FakeLdap as connector class.""" @@ -647,6 +716,6 @@ class FakeLdapNoSubtreeDelete(FakeLdap): except KeyError: LOG.debug('delete item failed: dn=%s not found.', dn) raise ldap.NO_SUCH_OBJECT - super(FakeLdapNoSubtreeDelete, self).delete_ext_s(dn, - serverctrls, - clientctrls) + super(FakeLdapNoSubtreeDelete, self).delete_ext_s( + dn, serverctrls, clientctrls + ) diff --git a/keystone/tests/unit/federation/test_core.py b/keystone/tests/unit/federation/test_core.py index 3a65b69c6e..fc4a592a0d 100644 --- a/keystone/tests/unit/federation/test_core.py +++ b/keystone/tests/unit/federation/test_core.py @@ -29,11 +29,12 @@ class TestFederationProtocol(unit.TestCase): self.useFixture(database.Database()) self.load_backends() PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) self.idp = { 'id': uuid.uuid4().hex, 'enabled': True, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } 
PROVIDERS.federation_api.create_idp(self.idp['id'], self.idp) self.mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER @@ -43,43 +44,38 @@ class TestFederationProtocol(unit.TestCase): ) def test_create_protocol(self): - protocol = { - 'id': uuid.uuid4().hex, - 'mapping_id': self.mapping['id'] - } + protocol = {'id': uuid.uuid4().hex, 'mapping_id': self.mapping['id']} protocol_ret = PROVIDERS.federation_api.create_protocol( self.idp['id'], protocol['id'], protocol ) self.assertEqual(protocol['id'], protocol_ret['id']) def test_create_protocol_with_invalid_mapping_id(self): - protocol = { - 'id': uuid.uuid4().hex, - 'mapping_id': uuid.uuid4().hex - } - self.assertRaises(exception.ValidationError, - PROVIDERS.federation_api.create_protocol, - self.idp['id'], - protocol['id'], - protocol) + protocol = {'id': uuid.uuid4().hex, 'mapping_id': uuid.uuid4().hex} + self.assertRaises( + exception.ValidationError, + PROVIDERS.federation_api.create_protocol, + self.idp['id'], + protocol['id'], + protocol, + ) def test_create_protocol_with_remote_id_attribute(self): protocol = { 'id': uuid.uuid4().hex, 'mapping_id': self.mapping['id'], - 'remote_id_attribute': uuid.uuid4().hex + 'remote_id_attribute': uuid.uuid4().hex, } protocol_ret = PROVIDERS.federation_api.create_protocol( self.idp['id'], protocol['id'], protocol ) - self.assertEqual(protocol['remote_id_attribute'], - protocol_ret['remote_id_attribute']) + self.assertEqual( + protocol['remote_id_attribute'], + protocol_ret['remote_id_attribute'], + ) def test_update_protocol(self): - protocol = { - 'id': uuid.uuid4().hex, - 'mapping_id': self.mapping['id'] - } + protocol = {'id': uuid.uuid4().hex, 'mapping_id': self.mapping['id']} protocol_ret = PROVIDERS.federation_api.create_protocol( self.idp['id'], protocol['id'], protocol ) @@ -95,26 +91,22 @@ class TestFederationProtocol(unit.TestCase): self.assertEqual(new_mapping['id'], protocol_ret['mapping_id']) def test_update_protocol_with_invalid_mapping_id(self): - protocol = { - 
'id': uuid.uuid4().hex, - 'mapping_id': self.mapping['id'] - } + protocol = {'id': uuid.uuid4().hex, 'mapping_id': self.mapping['id']} protocol_ret = PROVIDERS.federation_api.create_protocol( self.idp['id'], protocol['id'], protocol ) self.assertEqual(protocol['id'], protocol_ret['id']) protocol['mapping_id'] = uuid.uuid4().hex - self.assertRaises(exception.ValidationError, - PROVIDERS.federation_api.update_protocol, - self.idp['id'], - protocol['id'], - protocol) + self.assertRaises( + exception.ValidationError, + PROVIDERS.federation_api.update_protocol, + self.idp['id'], + protocol['id'], + protocol, + ) def test_update_protocol_with_remote_id_attribute(self): - protocol = { - 'id': uuid.uuid4().hex, - 'mapping_id': self.mapping['id'] - } + protocol = {'id': uuid.uuid4().hex, 'mapping_id': self.mapping['id']} protocol_ret = PROVIDERS.federation_api.create_protocol( self.idp['id'], protocol['id'], protocol ) @@ -123,5 +115,7 @@ class TestFederationProtocol(unit.TestCase): protocol_ret = PROVIDERS.federation_api.update_protocol( self.idp['id'], protocol['id'], protocol ) - self.assertEqual(protocol['remote_id_attribute'], - protocol_ret['remote_id_attribute']) + self.assertEqual( + protocol['remote_id_attribute'], + protocol_ret['remote_id_attribute'], + ) diff --git a/keystone/tests/unit/federation/test_utils.py b/keystone/tests/unit/federation/test_utils.py index f6b4aba0b4..3de170e735 100644 --- a/keystone/tests/unit/federation/test_utils.py +++ b/keystone/tests/unit/federation/test_utils.py @@ -30,62 +30,47 @@ class TestFederationUtils(unit.TestCase): "rules": [ { "remote": [ - { - "type": "OIDC-preferred_username" - }, - { - "type": "OIDC-email" - }, - { - "type": "OIDC-openstack-user-domain" - }, - { - "type": "OIDC-openstack-default-project" - }, + {"type": "OIDC-preferred_username"}, + {"type": "OIDC-email"}, + {"type": "OIDC-openstack-user-domain"}, + {"type": "OIDC-openstack-default-project"}, { "type": "OIDC-openstack-user-status", - "any_one_of": [ - 
"local" - ] + "any_one_of": ["local"], }, ], "local": [ { - "domain": { - "name": "{2}" - }, + "domain": {"name": "{2}"}, "user": { - "domain": { - "name": "{2}" - }, + "domain": {"name": "{2}"}, "type": "local", "name": "{0}", - "email": "{1}" + "email": "{1}", }, "projects": [ - { - "name": "{3}", - "roles": [ - { - "name": "member" - } - ] - } - ] + {"name": "{3}", "roles": [{"name": "member"}]} + ], } - ] - }] + ], + } + ], } self.attribute_mapping_schema_2_0 = copy.deepcopy( - self.attribute_mapping_schema_1_0) + self.attribute_mapping_schema_1_0 + ) self.attribute_mapping_schema_2_0['schema_version'] = '2.0' self.attribute_mapping_schema_2_0['rules'][0]['local'][0]["projects"][ - 0]['domain'] = {"name": "{some_place_holder}"} + 0 + ]['domain'] = {"name": "{some_place_holder}"} self.rule_processor = utils.RuleProcessor( - self.mapping_id_mock, self.attribute_mapping_schema_1_0) - self.rule_processor_schema_2_0 =\ + self.mapping_id_mock, self.attribute_mapping_schema_1_0 + ) + self.rule_processor_schema_2_0 = ( utils.RuleProcessorToHonorDomainOption( - self.mapping_id_mock, self.attribute_mapping_schema_2_0) + self.mapping_id_mock, self.attribute_mapping_schema_2_0 + ) + ) def test_validate_mapping_structure_schema1_0(self): utils.validate_mapping_structure(self.attribute_mapping_schema_1_0) @@ -100,8 +85,12 @@ class TestFederationUtils(unit.TestCase): def test_normalize_user_unexpected_type(self): user = {'type': "weird-type"} - self.assertRaises(ValidationError, self.rule_processor.normalize_user, - user, self.domain_mock) + self.assertRaises( + ValidationError, + self.rule_processor.normalize_user, + user, + self.domain_mock, + ) def test_normalize_user_type_local(self): user = {'type': utils.UserType.LOCAL} @@ -118,8 +107,8 @@ class TestFederationUtils(unit.TestCase): group_by_domain = {self.domain_id_mock: [group1]} result = utils.RuleProcessor( - self.mapping_id_mock, - self.attribute_mapping_schema_1_0).extract_groups(group_by_domain) + 
self.mapping_id_mock, self.attribute_mapping_schema_1_0 + ).extract_groups(group_by_domain) self.assertEqual([group1], list(result)) @@ -128,7 +117,8 @@ class TestFederationUtils(unit.TestCase): group1 = {'name': "group1", 'domain': domain} group_by_domain = {} result = self.rule_processor.process_group_by_name( - group1, group_by_domain) + group1, group_by_domain + ) self.assertEqual([group1], list(result)) self.assertEqual([domain["name"]], list(group_by_domain.keys())) @@ -136,7 +126,8 @@ class TestFederationUtils(unit.TestCase): group1 = {'name': "group1", 'domain': self.domain_mock} group_by_domain = {} result = self.rule_processor.process_group_by_name( - group1, group_by_domain) + group1, group_by_domain + ) self.assertEqual([group1], list(result)) self.assertEqual([self.domain_id_mock], list(group_by_domain.keys())) @@ -145,7 +136,8 @@ class TestFederationUtils(unit.TestCase): group1 = {'name': "group1", 'domain': self.domain_mock} group_by_domain = {} result = self.rule_processor.process_group_by_name( - group1, group_by_domain) + group1, group_by_domain + ) self.assertEqual([group1], list(result)) self.assertEqual(["domain1"], list(group_by_domain.keys())) @@ -154,7 +146,8 @@ class TestFederationUtils(unit.TestCase): group2 = {'name': "group2", 'domain': self.domain_mock} group_by_domain = {self.domain_id_mock: [group1]} result = self.rule_processor.process_group_by_name( - group2, group_by_domain) + group2, group_by_domain + ) self.assertEqual([group1, group2], list(result)) self.assertEqual([self.domain_id_mock], list(group_by_domain.keys())) @@ -164,10 +157,12 @@ class TestFederationUtils(unit.TestCase): group2 = {'name': "group2", 'domain': self.domain_mock} group_by_domain = {"domain1": [group1]} result = self.rule_processor.process_group_by_name( - group2, group_by_domain) + group2, group_by_domain + ) self.assertEqual([group1, group2], list(result)) - self.assertEqual(["domain1", self.domain_id_mock], - list(group_by_domain.keys())) + 
self.assertEqual( + ["domain1", self.domain_id_mock], list(group_by_domain.keys()) + ) def test_rule_processor_extract_projects_schema1_0_no_projects(self): result = self.rule_processor.extract_projects({}) @@ -187,22 +182,27 @@ class TestFederationUtils(unit.TestCase): projects_list = [{'name': "project1", 'domain': self.domain_mock}] identity_values = {'projects': projects_list} result = self.rule_processor_schema_2_0.extract_projects( - identity_values) + identity_values + ) self.assertEqual(projects_list, result) def test_rule_processor_extract_projects_schema2_0_no_domain(self): projects_list = [{'name': "project1"}] identity_values = {'projects': projects_list} result = self.rule_processor_schema_2_0.extract_projects( - identity_values) + identity_values + ) self.assertEqual(projects_list, result) def test_rule_processor_extract_projects_schema2_0_no_domain_project(self): project = {'name': "project1"} - identity_values = {'projects': [project.copy()], - 'domain': self.domain_mock} + identity_values = { + 'projects': [project.copy()], + 'domain': self.domain_mock, + } result = self.rule_processor_schema_2_0.extract_projects( - identity_values) + identity_values + ) expected_project = project.copy() expected_project['domain'] = self.domain_mock self.assertEqual([expected_project], result) @@ -214,9 +214,12 @@ class TestFederationUtils(unit.TestCase): def test_normalize_user_unexpected_type_schema_2_0(self): user = {'type': "weird-type"} - self.assertRaises(ValidationError, - self.rule_processor_schema_2_0.normalize_user, user, - self.domain_mock) + self.assertRaises( + ValidationError, + self.rule_processor_schema_2_0.normalize_user, + user, + self.domain_mock, + ) def test_normalize_user_type_local_schema_2_0(self): user = {'type': utils.UserType.LOCAL} @@ -236,15 +239,18 @@ class TestFederationUtils(unit.TestCase): def test_create_attribute_mapping_rules_processor_default(self): result = utils.create_attribute_mapping_rules_processor( - 
self.attribute_mapping_schema_1_0) + self.attribute_mapping_schema_1_0 + ) self.assertIsInstance(result, utils.RuleProcessor) def test_create_attribute_mapping_rules_processor_schema1_0(self): result = utils.create_attribute_mapping_rules_processor( - self.attribute_mapping_schema_1_0) + self.attribute_mapping_schema_1_0 + ) self.assertIsInstance(result, utils.RuleProcessor) def test_create_attribute_mapping_rules_processor_schema2_0(self): result = utils.create_attribute_mapping_rules_processor( - self.attribute_mapping_schema_2_0) + self.attribute_mapping_schema_2_0 + ) self.assertIsInstance(result, utils.RuleProcessorToHonorDomainOption) diff --git a/keystone/tests/unit/filtering.py b/keystone/tests/unit/filtering.py index 8b9a29a359..b40607fbbf 100644 --- a/keystone/tests/unit/filtering.py +++ b/keystone/tests/unit/filtering.py @@ -27,9 +27,14 @@ class FilterTests(object): # Provide support for checking if a batch of list items all # exist within a contiguous range in a total list - def _match_with_list(self, this_batch, total_list, - batch_size=None, - list_start=None, list_end=None): + def _match_with_list( + self, + this_batch, + total_list, + batch_size=None, + list_start=None, + list_end=None, + ): if batch_size is None: batch_size = len(this_batch) if list_start is None: @@ -93,8 +98,7 @@ class FilterTests(object): return f def _create_one_entity(self, entity_type, domain_id, name): - new_entity = {'name': name, - 'domain_id': domain_id} + new_entity = {'name': name, 'domain_id': domain_id} if entity_type in ['user', 'group']: # The manager layer creates the ID for users and groups new_entity = self._create_entity(entity_type)(new_entity) @@ -103,8 +107,9 @@ class FilterTests(object): self._create_entity(entity_type)(new_entity['id'], new_entity) return new_entity - def _create_test_data(self, entity_type, number, domain_id=None, - name_dict=None): + def _create_test_data( + self, entity_type, number, domain_id=None, name_dict=None + ): """Create entity 
test data. :param entity_type: type of entity to create, e.g. 'user', group' etc. diff --git a/keystone/tests/unit/identity/backends/test_base.py b/keystone/tests/unit/identity/backends/test_base.py index 17aee984c5..2f21ead811 100644 --- a/keystone/tests/unit/identity/backends/test_base.py +++ b/keystone/tests/unit/identity/backends/test_base.py @@ -75,25 +75,23 @@ class IdentityDriverTests(object): return self.driver.create_group(group_id, group) def test_is_domain_aware(self): - self.assertIs(self.expected_is_domain_aware, - self.driver.is_domain_aware()) + self.assertIs( + self.expected_is_domain_aware, self.driver.is_domain_aware() + ) def test_is_sql(self): self.assertIs(self.expected_is_sql, self.driver.is_sql) def test_generates_uuids(self): - self.assertIs(self.expected_generates_uuids, - self.driver.generates_uuids()) + self.assertIs( + self.expected_generates_uuids, self.driver.generates_uuids() + ) def test_create_user(self): # Don't use self.create_user since this needs to test the driver # interface and create_user might not use the driver. 
user_id = uuid.uuid4().hex - user = { - 'id': user_id, - 'name': uuid.uuid4().hex, - 'enabled': True - } + user = {'id': user_id, 'name': uuid.uuid4().hex, 'enabled': True} if self.driver.is_domain_aware(): user['domain_id'] = uuid.uuid4().hex ret = self.driver.create_user(user_id, user) @@ -108,7 +106,7 @@ class IdentityDriverTests(object): 'enabled': True, 'default_project_id': uuid.uuid4().hex, 'password_expires_at': None, - 'options': {} + 'options': {}, } if self.driver.is_domain_aware(): user['domain_id'] = uuid.uuid4().hex @@ -127,8 +125,9 @@ class IdentityDriverTests(object): if self.driver.is_domain_aware(): user['domain_id'] = uuid.uuid4().hex self.driver.create_user(user_id, user) - self.assertRaises(exception.Conflict, - self.driver.create_user, user_id, user) + self.assertRaises( + exception.Conflict, self.driver.create_user, user_id, user + ) def test_create_user_same_name_and_domain_exc(self): user1_id = uuid.uuid4().hex @@ -151,8 +150,9 @@ class IdentityDriverTests(object): } if self.driver.is_domain_aware(): user['domain_id'] = domain_id - self.assertRaises(exception.Conflict, - self.driver.create_user, user2_id, user) + self.assertRaises( + exception.Conflict, self.driver.create_user, user2_id, user + ) def test_list_users_no_users(self): hints = driver_hints.Hints() @@ -172,8 +172,9 @@ class IdentityDriverTests(object): self.assertEqual(user['id'], actual_user['id']) def test_get_user_no_user_exc(self): - self.assertRaises(exception.UserNotFound, - self.driver.get_user, uuid.uuid4().hex) + self.assertRaises( + exception.UserNotFound, self.driver.get_user, uuid.uuid4().hex + ) def test_get_user_by_name(self): domain_id = uuid.uuid4().hex @@ -185,8 +186,11 @@ class IdentityDriverTests(object): def test_get_user_by_name_no_user_exc(self): # When the user doesn't exist, UserNotFound is raised. 
self.assertRaises( - exception.UserNotFound, self.driver.get_user_by_name, - user_name=uuid.uuid4().hex, domain_id=uuid.uuid4().hex) + exception.UserNotFound, + self.driver.get_user_by_name, + user_name=uuid.uuid4().hex, + domain_id=uuid.uuid4().hex, + ) def test_update_user(self): user = self.create_user() @@ -217,14 +221,16 @@ class IdentityDriverTests(object): user2 = self.create_user(domain_id=domain_id) user_mod = {'name': user2['name']} - self.assertRaises(exception.Conflict, self.driver.update_user, - user1['id'], user_mod) + self.assertRaises( + exception.Conflict, self.driver.update_user, user1['id'], user_mod + ) def test_update_user_no_user_exc(self): user_id = uuid.uuid4().hex user_mod = {'enabled': False} - self.assertRaises(exception.UserNotFound, - self.driver.update_user, user_id, user_mod) + self.assertRaises( + exception.UserNotFound, self.driver.update_user, user_id, user_mod + ) def test_update_user_name_not_allowed_exc(self): # For drivers that do not allow name update, attempting to change the @@ -235,8 +241,9 @@ class IdentityDriverTests(object): user = self.create_user() user_mod = {'name': uuid.uuid4().hex} - self.assertRaises(exception.Conflict, self.driver.update_user, - user['id'], user_mod) + self.assertRaises( + exception.Conflict, self.driver.update_user, user['id'], user_mod + ) def test_change_password(self): if not self.allows_self_service_change_password: @@ -254,13 +261,17 @@ class IdentityDriverTests(object): user = self.create_user() self.driver.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, self.driver.get_user, - user['id']) + self.assertRaises( + exception.UserNotFound, self.driver.get_user, user['id'] + ) def test_delete_user_no_user_exc(self): # When the user doesn't exist, UserNotFound is raised. 
- self.assertRaises(exception.UserNotFound, self.driver.delete_user, - user_id=uuid.uuid4().hex) + self.assertRaises( + exception.UserNotFound, + self.driver.delete_user, + user_id=uuid.uuid4().hex, + ) def test_create_group(self): group_id = uuid.uuid4().hex @@ -304,8 +315,9 @@ class IdentityDriverTests(object): } if self.driver.is_domain_aware(): group2['domain_id'] = domain - self.assertRaises(exception.Conflict, self.driver.create_group, - group2_id, group2) + self.assertRaises( + exception.Conflict, self.driver.create_group, group2_id, group2 + ) def test_get_group(self): group = self.create_group() @@ -315,8 +327,11 @@ class IdentityDriverTests(object): def test_get_group_no_group_exc(self): # When the group doesn't exist, get_group raises GroupNotFound. - self.assertRaises(exception.GroupNotFound, self.driver.get_group, - group_id=uuid.uuid4().hex) + self.assertRaises( + exception.GroupNotFound, + self.driver.get_group, + group_id=uuid.uuid4().hex, + ) def test_get_group_by_name(self): domain_id = uuid.uuid4().hex @@ -328,8 +343,11 @@ class IdentityDriverTests(object): def test_get_group_by_name_no_user_exc(self): # When the group doesn't exist, get_group raises GroupNotFound. self.assertRaises( - exception.GroupNotFound, self.driver.get_group_by_name, - group_name=uuid.uuid4().hex, domain_id=uuid.uuid4().hex) + exception.GroupNotFound, + self.driver.get_group_by_name, + group_name=uuid.uuid4().hex, + domain_id=uuid.uuid4().hex, + ) def test_update_group(self): group = self.create_group() @@ -342,8 +360,12 @@ class IdentityDriverTests(object): def test_update_group_no_group(self): # When the group doesn't exist, GroupNotFound is raised. 
group_mod = {'description': uuid.uuid4().hex} - self.assertRaises(exception.GroupNotFound, self.driver.update_group, - group_id=uuid.uuid4().hex, group=group_mod) + self.assertRaises( + exception.GroupNotFound, + self.driver.update_group, + group_id=uuid.uuid4().hex, + group=group_mod, + ) def test_update_group_name_already_exists(self): # For drivers that support renaming, when the group is renamed to a @@ -357,8 +379,12 @@ class IdentityDriverTests(object): group2 = self.create_group(domain_id=domain_id) group_mod = {'name': group1['name']} - self.assertRaises(exception.Conflict, self.driver.update_group, - group2['id'], group_mod) + self.assertRaises( + exception.Conflict, + self.driver.update_group, + group2['id'], + group_mod, + ) def test_update_group_name_not_allowed(self): # For drivers that do not support renaming, when the group is attempted @@ -370,18 +396,26 @@ class IdentityDriverTests(object): group = self.create_group() group_mod = {'name': uuid.uuid4().hex} - self.assertRaises(exception.ValidationError, self.driver.update_group, - group['id'], group_mod) + self.assertRaises( + exception.ValidationError, + self.driver.update_group, + group['id'], + group_mod, + ) def test_delete_group(self): group = self.create_group() self.driver.delete_group(group['id']) - self.assertRaises(exception.GroupNotFound, self.driver.get_group, - group['id']) + self.assertRaises( + exception.GroupNotFound, self.driver.get_group, group['id'] + ) def test_delete_group_doesnt_exist_exc(self): - self.assertRaises(exception.GroupNotFound, self.driver.delete_group, - group_id=uuid.uuid4().hex) + self.assertRaises( + exception.GroupNotFound, + self.driver.delete_group, + group_id=uuid.uuid4().hex, + ) def test_list_groups_no_groups(self): groups = self.driver.list_groups(driver_hints.Hints()) @@ -405,15 +439,23 @@ class IdentityDriverTests(object): group = self.create_group() user_id = uuid.uuid4().hex - self.assertRaises(exception.UserNotFound, - self.driver.add_user_to_group, 
user_id, group['id']) + self.assertRaises( + exception.UserNotFound, + self.driver.add_user_to_group, + user_id, + group['id'], + ) def test_add_user_to_group_no_group_exc(self): user = self.create_user() group_id = uuid.uuid4().hex - self.assertRaises(exception.GroupNotFound, - self.driver.add_user_to_group, user['id'], group_id) + self.assertRaises( + exception.GroupNotFound, + self.driver.add_user_to_group, + user['id'], + group_id, + ) def test_check_user_in_group(self): user = self.create_user() @@ -427,8 +469,12 @@ class IdentityDriverTests(object): user = self.create_user() group = self.create_group() - self.assertRaises(exception.NotFound, self.driver.check_user_in_group, - user['id'], group['id']) + self.assertRaises( + exception.NotFound, + self.driver.check_user_in_group, + user['id'], + group['id'], + ) def test_check_user_in_group_user_doesnt_exist_exc(self): # When the user doesn't exist, UserNotFound is raised. @@ -436,8 +482,11 @@ class IdentityDriverTests(object): user_id = uuid.uuid4().hex self.assertRaises( - exception.UserNotFound, self.driver.check_user_in_group, user_id, - group['id']) + exception.UserNotFound, + self.driver.check_user_in_group, + user_id, + group['id'], + ) def test_check_user_in_group_group_doesnt_exist_exc(self): # When the group doesn't exist, UserNotFound is raised. 
@@ -445,14 +494,18 @@ class IdentityDriverTests(object): group_id = uuid.uuid4().hex self.assertRaises( - exception.GroupNotFound, self.driver.check_user_in_group, - user['id'], group_id) + exception.GroupNotFound, + self.driver.check_user_in_group, + user['id'], + group_id, + ) def test_list_users_in_group_no_users(self): group = self.create_group() - users = self.driver.list_users_in_group(group['id'], - driver_hints.Hints()) + users = self.driver.list_users_in_group( + group['id'], driver_hints.Hints() + ) self.assertEqual([], users) def test_list_users_in_group_user(self): @@ -460,21 +513,26 @@ class IdentityDriverTests(object): user = self.create_user() self.driver.add_user_to_group(user['id'], group['id']) - users = self.driver.list_users_in_group(group['id'], - driver_hints.Hints()) + users = self.driver.list_users_in_group( + group['id'], driver_hints.Hints() + ) self.assertEqual([user['id']], [u['id'] for u in users]) def test_list_users_in_group_no_group(self): group_id = uuid.uuid4().hex self.assertRaises( - exception.GroupNotFound, self.driver.list_users_in_group, group_id, - driver_hints.Hints()) + exception.GroupNotFound, + self.driver.list_users_in_group, + group_id, + driver_hints.Hints(), + ) def test_list_groups_for_user_no_groups(self): user = self.create_user() - groups = self.driver.list_groups_for_user(user['id'], - driver_hints.Hints()) + groups = self.driver.list_groups_for_user( + user['id'], driver_hints.Hints() + ) self.assertEqual([], groups) def test_list_groups_for_user_group(self): @@ -482,15 +540,19 @@ class IdentityDriverTests(object): group = self.create_group() self.driver.add_user_to_group(user['id'], group['id']) - groups = self.driver.list_groups_for_user(user['id'], - driver_hints.Hints()) + groups = self.driver.list_groups_for_user( + user['id'], driver_hints.Hints() + ) self.assertEqual([group['id']], [g['id'] for g in groups]) def test_list_groups_for_user_no_user(self): user_id = uuid.uuid4().hex self.assertRaises( - 
exception.UserNotFound, self.driver.list_groups_for_user, - user_id, driver_hints.Hints()) + exception.UserNotFound, + self.driver.list_groups_for_user, + user_id, + driver_hints.Hints(), + ) def test_remove_user_from_group(self): user = self.create_user() @@ -499,8 +561,12 @@ class IdentityDriverTests(object): self.driver.remove_user_from_group(user['id'], group['id']) - self.assertRaises(exception.NotFound, self.driver.check_user_in_group, - user['id'], group['id']) + self.assertRaises( + exception.NotFound, + self.driver.check_user_in_group, + user['id'], + group['id'], + ) def test_remove_user_from_group_not_in_group(self): user = self.create_user() @@ -509,24 +575,33 @@ class IdentityDriverTests(object): # FIXME(blk-u): ldap is returning UserNotFound rather than NotFound, # fix this. self.assertRaises( - exception.NotFound, self.driver.remove_user_from_group, user['id'], - group['id']) + exception.NotFound, + self.driver.remove_user_from_group, + user['id'], + group['id'], + ) def test_remove_user_from_group_no_user(self): group = self.create_group() user_id = uuid.uuid4().hex self.assertRaises( - exception.UserNotFound, self.driver.remove_user_from_group, - user_id, group['id']) + exception.UserNotFound, + self.driver.remove_user_from_group, + user_id, + group['id'], + ) def test_remove_user_from_group_no_group(self): user = self.create_user() group_id = uuid.uuid4().hex self.assertRaises( - exception.GroupNotFound, self.driver.remove_user_from_group, - user['id'], group_id) + exception.GroupNotFound, + self.driver.remove_user_from_group, + user['id'], + group_id, + ) def test_authenticate(self): password = uuid.uuid4().hex @@ -539,11 +614,13 @@ class IdentityDriverTests(object): user = self.create_user(password=uuid.uuid4().hex) password = uuid.uuid4().hex - self.assertRaises(AssertionError, self.driver.authenticate, user['id'], - password) + self.assertRaises( + AssertionError, self.driver.authenticate, user['id'], password + ) def 
test_authenticate_no_user(self): user_id = uuid.uuid4().hex password = uuid.uuid4().hex - self.assertRaises(AssertionError, self.driver.authenticate, user_id, - password) + self.assertRaises( + AssertionError, self.driver.authenticate, user_id, password + ) diff --git a/keystone/tests/unit/identity/backends/test_ldap.py b/keystone/tests/unit/identity/backends/test_ldap.py index b0cc8dd296..99cc3af9da 100644 --- a/keystone/tests/unit/identity/backends/test_ldap.py +++ b/keystone/tests/unit/identity/backends/test_ldap.py @@ -18,8 +18,7 @@ from keystone.tests.unit.identity.backends import test_base from keystone.tests.unit.ksfixtures import ldapdb -class TestIdentityDriver(core.BaseTestCase, - test_base.IdentityDriverTests): +class TestIdentityDriver(core.BaseTestCase, test_base.IdentityDriverTests): allows_name_update = False allows_self_service_change_password = False @@ -37,7 +36,8 @@ class TestIdentityDriver(core.BaseTestCase, url='fake://memory', user='cn=Admin', password='password', - suffix='cn=example,cn=com') + suffix='cn=example,cn=com', + ) self.useFixture(ldapdb.LDAPDatabase()) diff --git a/keystone/tests/unit/identity/backends/test_ldap_common.py b/keystone/tests/unit/identity/backends/test_ldap_common.py index a4d4335f0b..ce1d749314 100644 --- a/keystone/tests/unit/identity/backends/test_ldap_common.py +++ b/keystone/tests/unit/identity/backends/test_ldap_common.py @@ -210,7 +210,8 @@ class LDAPDeleteTreeTest(unit.TestCase): super(LDAPDeleteTreeTest, self).setUp() self.useFixture( - ldapdb.LDAPDatabase(dbclass=fakeldap.FakeLdapNoSubtreeDelete)) + ldapdb.LDAPDatabase(dbclass=fakeldap.FakeLdapNoSubtreeDelete) + ) self.useFixture(database.Database()) self.load_backends() @@ -247,13 +248,14 @@ class MultiURLTests(unit.TestCase): @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'simple_bind_s') def test_multiple_urls_with_comma_randomized(self, mock_ldap_bind): - urls = ('ldap://localhost1,ldap://localhost2,' - 'ldap://localhost3,ldap://localhost4,' - 
'ldap://localhost5,ldap://localhost6,' - 'ldap://localhost7,ldap://localhost8,' - 'ldap://localhost9,ldap://localhost0') - self.config_fixture.config(group='ldap', url=urls, - randomize_urls=True) + urls = ( + 'ldap://localhost1,ldap://localhost2,' + 'ldap://localhost3,ldap://localhost4,' + 'ldap://localhost5,ldap://localhost6,' + 'ldap://localhost7,ldap://localhost8,' + 'ldap://localhost9,ldap://localhost0' + ) + self.config_fixture.config(group='ldap', url=urls, randomize_urls=True) base_ldap = common_ldap.BaseLdap(CONF) ldap_connection = base_ldap.get_connection() @@ -261,16 +263,21 @@ class MultiURLTests(unit.TestCase): self.assertEqual(len(urls.split(',')), 10) # Check that the list is split into the same number of URIs - self.assertEqual(len(urls.split(',')), - len(ldap_connection.conn.conn_pool.uri.split(','))) + self.assertEqual( + len(urls.split(',')), + len(ldap_connection.conn.conn_pool.uri.split(',')), + ) # Check that the list is randomized - self.assertNotEqual(urls.split(','), - ldap_connection.conn.conn_pool.uri.split(',')) + self.assertNotEqual( + urls.split(','), ldap_connection.conn.conn_pool.uri.split(',') + ) # Check that the list contains the same URIs - self.assertEqual(set(urls.split(',')), - set(ldap_connection.conn.conn_pool.uri.split(','))) + self.assertEqual( + set(urls.split(',')), + set(ldap_connection.conn.conn_pool.uri.split(',')), + ) class LDAPConnectionTimeoutTest(unit.TestCase): @@ -280,14 +287,17 @@ class LDAPConnectionTimeoutTest(unit.TestCase): def test_connectivity_timeout_no_conn_pool(self, mock_ldap_bind): url = 'ldap://localhost' conn_timeout = 1 # 1 second - self.config_fixture.config(group='ldap', - url=url, - connection_timeout=conn_timeout, - use_pool=False) + self.config_fixture.config( + group='ldap', + url=url, + connection_timeout=conn_timeout, + use_pool=False, + ) base_ldap = common_ldap.BaseLdap(CONF) ldap_connection = base_ldap.get_connection() - self.assertIsInstance(ldap_connection.conn, - 
common_ldap.PythonLDAPHandler) + self.assertIsInstance( + ldap_connection.conn, common_ldap.PythonLDAPHandler + ) # Ensure that the Network Timeout option is set. # Also ensure that the URL is set. @@ -297,23 +307,27 @@ class LDAPConnectionTimeoutTest(unit.TestCase): # integration testing. If the LDAP option is set properly, # and we get back a valid connection URI then that should # suffice for this unit test. - self.assertEqual(conn_timeout, - ldap.get_option(ldap.OPT_NETWORK_TIMEOUT)) + self.assertEqual( + conn_timeout, ldap.get_option(ldap.OPT_NETWORK_TIMEOUT) + ) self.assertEqual(url, ldap_connection.conn.conn._uri) @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'simple_bind_s') def test_connectivity_timeout_with_conn_pool(self, mock_ldap_bind): url = 'ldap://localhost' conn_timeout = 1 # 1 second - self.config_fixture.config(group='ldap', - url=url, - pool_connection_timeout=conn_timeout, - use_pool=True, - pool_retry_max=1) + self.config_fixture.config( + group='ldap', + url=url, + pool_connection_timeout=conn_timeout, + use_pool=True, + pool_retry_max=1, + ) base_ldap = common_ldap.BaseLdap(CONF) ldap_connection = base_ldap.get_connection() - self.assertIsInstance(ldap_connection.conn, - common_ldap.PooledLDAPHandler) + self.assertIsInstance( + ldap_connection.conn, common_ldap.PooledLDAPHandler + ) # Ensure that the Network Timeout option is set. # Also ensure that the URL is set. @@ -323,8 +337,9 @@ class LDAPConnectionTimeoutTest(unit.TestCase): # integration testing. If the LDAP option is set properly, # and we get back a valid connection URI then that should # suffice for this unit test. 
- self.assertEqual(conn_timeout, - ldap.get_option(ldap.OPT_NETWORK_TIMEOUT)) + self.assertEqual( + conn_timeout, ldap.get_option(ldap.OPT_NETWORK_TIMEOUT) + ) self.assertEqual(url, ldap_connection.conn.conn_pool.uri) @@ -347,10 +362,12 @@ class SslTlsTest(unit.BaseTestCase): (handle, certfile) = tempfile.mkstemp() self.addCleanup(os.unlink, certfile) self.addCleanup(os.close, handle) - self.config_fixture.config(group='ldap', - url='ldap://localhost', - use_tls=True, - tls_cacertfile=certfile) + self.config_fixture.config( + group='ldap', + url='ldap://localhost', + use_tls=True, + tls_cacertfile=certfile, + ) self._init_ldap_connection(CONF) @@ -360,10 +377,12 @@ class SslTlsTest(unit.BaseTestCase): def test_certdir_trust_tls(self): # We need this to actually exist, so we create a tempdir. certdir = self.useFixture(fixtures.TempDir()).path - self.config_fixture.config(group='ldap', - url='ldap://localhost', - use_tls=True, - tls_cacertdir=certdir) + self.config_fixture.config( + group='ldap', + url='ldap://localhost', + use_tls=True, + tls_cacertdir=certdir, + ) self._init_ldap_connection(CONF) @@ -375,10 +394,12 @@ class SslTlsTest(unit.BaseTestCase): (handle, certfile) = tempfile.mkstemp() self.addCleanup(os.unlink, certfile) self.addCleanup(os.close, handle) - self.config_fixture.config(group='ldap', - url='ldaps://localhost', - use_tls=False, - tls_cacertfile=certfile) + self.config_fixture.config( + group='ldap', + url='ldaps://localhost', + use_tls=False, + tls_cacertfile=certfile, + ) self._init_ldap_connection(CONF) @@ -388,10 +409,12 @@ class SslTlsTest(unit.BaseTestCase): def test_certdir_trust_ldaps(self): # We need this to actually exist, so we create a tempdir. 
certdir = self.useFixture(fixtures.TempDir()).path - self.config_fixture.config(group='ldap', - url='ldaps://localhost', - use_tls=False, - tls_cacertdir=certdir) + self.config_fixture.config( + group='ldap', + url='ldaps://localhost', + use_tls=False, + tls_cacertdir=certdir, + ) self._init_ldap_connection(CONF) @@ -425,18 +448,18 @@ class LDAPPagedResultsTest(unit.TestCase): def test_paged_results_control_api(self, mock_result3, mock_search_ext): mock_result3.return_value = ('', [], 1, []) - self.config_fixture.config(group='ldap', - page_size=1) + self.config_fixture.config(group='ldap', page_size=1) conn = PROVIDERS.identity_api.user.get_connection() - conn._paged_search_s('dc=example,dc=test', - ldap.SCOPE_SUBTREE, - 'objectclass=*', - ['mail', 'userPassword']) + conn._paged_search_s( + 'dc=example,dc=test', + ldap.SCOPE_SUBTREE, + 'objectclass=*', + ['mail', 'userPassword'], + ) # verify search_ext() args - attrlist is tricky due to ordering args, _ = mock_search_ext.call_args - self.assertEqual( - ('dc=example,dc=test', 2, 'objectclass=*'), args[0:3]) + self.assertEqual(('dc=example,dc=test', 2, 'objectclass=*'), args[0:3]) attrlist = sorted([attr for attr in args[3] if attr]) self.assertEqual(['mail', 'userPassword'], attrlist) @@ -445,15 +468,17 @@ class CommonLdapTestCase(unit.BaseTestCase): """These test cases call functions in keystone.common.ldap.""" def test_binary_attribute_values(self): - result = [( - 'cn=junk,dc=example,dc=com', - { - 'cn': ['junk'], - 'sn': [uuid.uuid4().hex], - 'mail': [uuid.uuid4().hex], - 'binary_attr': [b'\x00\xFF\x00\xFF'] - } - ), ] + result = [ + ( + 'cn=junk,dc=example,dc=com', + { + 'cn': ['junk'], + 'sn': [uuid.uuid4().hex], + 'mail': [uuid.uuid4().hex], + 'binary_attr': [b'\x00\xFF\x00\xFF'], + }, + ), + ] py_result = common_ldap.convert_ldap_result(result) # The attribute containing the binary value should # not be present in the converted result. 
@@ -475,22 +500,19 @@ class CommonLdapTestCase(unit.BaseTestCase): result_unicode = common_ldap.utf8_decode(value_unicode) self.assertEqual(value_unicode, result_unicode) - self.assertRaises(TypeError, - common_ldap.utf8_encode, - 100) + self.assertRaises(TypeError, common_ldap.utf8_encode, 100) result_unicode = common_ldap.utf8_decode(100) self.assertEqual(u'100', result_unicode) def test_user_id_begins_with_0(self): user_id = '0123456' - result = [( - 'cn=dummy,dc=example,dc=com', - { - 'user_id': [user_id], - 'enabled': ['TRUE'] - } - ), ] + result = [ + ( + 'cn=dummy,dc=example,dc=com', + {'user_id': [user_id], 'enabled': ['TRUE']}, + ), + ] py_result = common_ldap.convert_ldap_result(result) # The user id should be 0123456, and the enabled # flag should be True @@ -501,13 +523,12 @@ class CommonLdapTestCase(unit.BaseTestCase): user_id = '0123456' bitmask = '225' expected_bitmask = 225 - result = [( - 'cn=dummy,dc=example,dc=com', - { - 'user_id': [user_id], - 'enabled': [bitmask] - } - ), ] + result = [ + ( + 'cn=dummy,dc=example,dc=com', + {'user_id': [user_id], 'enabled': [bitmask]}, + ), + ] py_result = common_ldap.convert_ldap_result(result) # The user id should be 0123456, and the enabled # flag should be 225 @@ -518,13 +539,12 @@ class CommonLdapTestCase(unit.BaseTestCase): user_id = '0123456' bitmask = '0225' expected_bitmask = 225 - result = [( - 'cn=dummy,dc=example,dc=com', - { - 'user_id': [user_id], - 'enabled': [bitmask] - } - ), ] + result = [ + ( + 'cn=dummy,dc=example,dc=com', + {'user_id': [user_id], 'enabled': [bitmask]}, + ), + ] py_result = common_ldap.convert_ldap_result(result) # The user id should be 0123456, and the enabled # flag should be 225, the 0 is dropped. 
@@ -532,31 +552,41 @@ class CommonLdapTestCase(unit.BaseTestCase): self.assertEqual(user_id, py_result[0][1]['user_id'][0]) def test_user_id_and_user_name_with_boolean_string(self): - boolean_strings = ['TRUE', 'FALSE', 'true', 'false', 'True', 'False', - 'TrUe' 'FaLse'] + boolean_strings = [ + 'TRUE', + 'FALSE', + 'true', + 'false', + 'True', + 'False', + 'TrUe' 'FaLse', + ] for user_name in boolean_strings: user_id = uuid.uuid4().hex - result = [( - 'cn=dummy,dc=example,dc=com', - { - 'user_id': [user_id], - 'user_name': [user_name] - } - ), ] + result = [ + ( + 'cn=dummy,dc=example,dc=com', + {'user_id': [user_id], 'user_name': [user_name]}, + ), + ] py_result = common_ldap.convert_ldap_result(result) # The user name should still be a string value. self.assertEqual(user_name, py_result[0][1]['user_name'][0]) def test_user_id_attribute_is_uuid_in_byte_form(self): - results = [( - 'cn=alice,dc=example,dc=com', - { - 'cn': [b'cn=alice'], - 'objectGUID': [b'\xdd\xd8Rt\xee]bA\x8e(\xe39\x0b\xe1\xf8\xe8'], - 'email': [uuid.uuid4().hex], - 'sn': [uuid.uuid4().hex] - } - )] + results = [ + ( + 'cn=alice,dc=example,dc=com', + { + 'cn': [b'cn=alice'], + 'objectGUID': [ + b'\xdd\xd8Rt\xee]bA\x8e(\xe39\x0b\xe1\xf8\xe8' + ], + 'email': [uuid.uuid4().hex], + 'sn': [uuid.uuid4().hex], + }, + ) + ] py_result = common_ldap.convert_ldap_result(results) exp_object_guid = '7452d8dd-5dee-4162-8e28-e3390be1f8e8' self.assertEqual(exp_object_guid, py_result[0][1]['objectGUID'][0]) @@ -583,8 +613,9 @@ class LDAPFilterQueryCompositionTest(unit.BaseTestCase): # NOTE: doesn't have to be a real query, we just need to make sure the # same string is returned if there are no hints. 
query = uuid.uuid4().hex - self.assertEqual(query, - self.base_ldap.filter_query(hints=hints, query=query)) + self.assertEqual( + query, self.base_ldap.filter_query(hints=hints, query=query) + ) # make sure the default query is an empty string self.assertEqual('', self.base_ldap.filter_query(hints=hints)) @@ -592,14 +623,19 @@ class LDAPFilterQueryCompositionTest(unit.BaseTestCase): def test_filter_with_empty_query_and_hints_set(self): hints = driver_hints.Hints() username = uuid.uuid4().hex - hints.add_filter(name=self.attribute_name, - value=username, - comparator='equals', - case_sensitive=False) + hints.add_filter( + name=self.attribute_name, + value=username, + comparator='equals', + case_sensitive=False, + ) expected_ldap_filter = '(&(%s=%s))' % ( - self.filter_attribute_name, username) - self.assertEqual(expected_ldap_filter, - self.base_ldap.filter_query(hints=hints)) + self.filter_attribute_name, + username, + ) + self.assertEqual( + expected_ldap_filter, self.base_ldap.filter_query(hints=hints) + ) def test_filter_with_both_query_and_hints_set(self): hints = driver_hints.Hints() @@ -608,24 +644,35 @@ class LDAPFilterQueryCompositionTest(unit.BaseTestCase): query = uuid.uuid4().hex username = uuid.uuid4().hex expected_result = '(&%(query)s(%(user_name_attr)s=%(username)s))' % ( - {'query': query, - 'user_name_attr': self.filter_attribute_name, - 'username': username}) + { + 'query': query, + 'user_name_attr': self.filter_attribute_name, + 'username': username, + } + ) hints.add_filter(self.attribute_name, username) - self.assertEqual(expected_result, - self.base_ldap.filter_query(hints=hints, query=query)) + self.assertEqual( + expected_result, + self.base_ldap.filter_query(hints=hints, query=query), + ) def test_filter_with_hints_and_query_is_none(self): hints = driver_hints.Hints() username = uuid.uuid4().hex - hints.add_filter(name=self.attribute_name, - value=username, - comparator='equals', - case_sensitive=False) + hints.add_filter( + 
name=self.attribute_name, + value=username, + comparator='equals', + case_sensitive=False, + ) expected_ldap_filter = '(&(%s=%s))' % ( - self.filter_attribute_name, username) - self.assertEqual(expected_ldap_filter, - self.base_ldap.filter_query(hints=hints, query=None)) + self.filter_attribute_name, + username, + ) + self.assertEqual( + expected_ldap_filter, + self.base_ldap.filter_query(hints=hints, query=None), + ) class LDAPSizeLimitTest(unit.TestCase): @@ -653,7 +700,9 @@ class LDAPSizeLimitTest(unit.TestCase): def test_search_s_sizelimit_exceeded(self, mock_search_s): mock_search_s.side_effect = ldap.SIZELIMIT_EXCEEDED conn = PROVIDERS.identity_api.user.get_connection() - self.assertRaises(ks_exception.LDAPSizeLimitExceeded, - conn.search_s, - 'dc=example,dc=test', - ldap.SCOPE_SUBTREE) + self.assertRaises( + ks_exception.LDAPSizeLimitExceeded, + conn.search_s, + 'dc=example,dc=test', + ldap.SCOPE_SUBTREE, + ) diff --git a/keystone/tests/unit/identity/backends/test_sql.py b/keystone/tests/unit/identity/backends/test_sql.py index 55b93cbb6d..6bd971e653 100644 --- a/keystone/tests/unit/identity/backends/test_sql.py +++ b/keystone/tests/unit/identity/backends/test_sql.py @@ -20,9 +20,11 @@ from keystone.tests.unit.identity.backends import test_base as id_test_base from keystone.tests.unit.ksfixtures import database -class TestIdentityDriver(db_fixtures.OpportunisticDBTestMixin, - test_base.BaseTestCase, - id_test_base.IdentityDriverTests): +class TestIdentityDriver( + db_fixtures.OpportunisticDBTestMixin, + test_base.BaseTestCase, + id_test_base.IdentityDriverTests, +): expected_is_domain_aware = True expected_default_assignment_driver = 'sql' @@ -36,14 +38,16 @@ class TestIdentityDriver(db_fixtures.OpportunisticDBTestMixin, # Set keystone's connection URL to be the test engine's url. Close # sqlite FK to avoid conflicting with sql upgrade test. 
- database.initialize_sql_session(self.engine.url, - enforce_sqlite_fks=False) + database.initialize_sql_session( + self.engine.url, enforce_sqlite_fks=False + ) # Override keystone's context manager to be oslo.db's global context # manager. sql.core._TESTING_USE_GLOBAL_CONTEXT_MANAGER = True - self.addCleanup(setattr, - sql.core, '_TESTING_USE_GLOBAL_CONTEXT_MANAGER', False) + self.addCleanup( + setattr, sql.core, '_TESTING_USE_GLOBAL_CONTEXT_MANAGER', False + ) self.addCleanup(sql.cleanup) database._load_sqlalchemy_models() diff --git a/keystone/tests/unit/identity/shadow_users/test_backend.py b/keystone/tests/unit/identity/shadow_users/test_backend.py index 38300443ce..2dc0827558 100644 --- a/keystone/tests/unit/identity/shadow_users/test_backend.py +++ b/keystone/tests/unit/identity/shadow_users/test_backend.py @@ -37,9 +37,11 @@ class ShadowUsersBackendTests(object): self.assertEqual(user_created['name'], user['name']) new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) new_user['name'] = user['name'] - self.assertRaises(exception.Conflict, - PROVIDERS.shadow_users_api.create_nonlocal_user, - new_user) + self.assertRaises( + exception.Conflict, + PROVIDERS.shadow_users_api.create_nonlocal_user, + new_user, + ) def test_create_nonlocal_user_does_not_create_local_user(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -56,10 +58,11 @@ class ShadowUsersBackendTests(object): nonlocal_user = { 'domain_id': CONF.identity.default_domain_id, 'name': uuid.uuid4().hex, - 'user_id': user['id'] + 'user_id': user['id'], } - self.assertRaises(sql.DBDuplicateEntry, self._add_nonlocal_user, - nonlocal_user) + self.assertRaises( + sql.DBDuplicateEntry, self._add_nonlocal_user, nonlocal_user + ) def test_get_user(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -72,67 +75,80 @@ class ShadowUsersBackendTests(object): def test_create_federated_user_unique_constraint(self): user_dict = 
PROVIDERS.shadow_users_api.create_federated_user( - self.domain_id, self.federated_user) + self.domain_id, self.federated_user + ) user_dict = PROVIDERS.shadow_users_api.get_user(user_dict["id"]) self.assertIsNotNone(user_dict["id"]) - self.assertRaises(exception.Conflict, - PROVIDERS.shadow_users_api.create_federated_user, - self.domain_id, - self.federated_user) + self.assertRaises( + exception.Conflict, + PROVIDERS.shadow_users_api.create_federated_user, + self.domain_id, + self.federated_user, + ) def test_create_federated_user_domain(self): user = PROVIDERS.shadow_users_api.create_federated_user( - self.domain_id, self.federated_user) + self.domain_id, self.federated_user + ) self.assertEqual(user['domain_id'], self.domain_id) def test_create_federated_user_email(self): user = PROVIDERS.shadow_users_api.create_federated_user( - self.domain_id, self.federated_user, self.email) + self.domain_id, self.federated_user, self.email + ) self.assertEqual(user['email'], self.email) def test_get_federated_user(self): user_dict_create = PROVIDERS.shadow_users_api.create_federated_user( - self.domain_id, self.federated_user) + self.domain_id, self.federated_user + ) user_dict_get = PROVIDERS.shadow_users_api.get_federated_user( self.federated_user["idp_id"], self.federated_user["protocol_id"], - self.federated_user["unique_id"]) + self.federated_user["unique_id"], + ) self.assertCountEqual(user_dict_create, user_dict_get) self.assertEqual(user_dict_create["id"], user_dict_get["id"]) def test_update_federated_user_display_name(self): user_dict_create = PROVIDERS.shadow_users_api.create_federated_user( - self.domain_id, self.federated_user) + self.domain_id, self.federated_user + ) new_display_name = uuid.uuid4().hex PROVIDERS.shadow_users_api.update_federated_user_display_name( self.federated_user["idp_id"], self.federated_user["protocol_id"], self.federated_user["unique_id"], - new_display_name) + new_display_name, + ) user_ref = 
PROVIDERS.shadow_users_api._get_federated_user( self.federated_user["idp_id"], self.federated_user["protocol_id"], - self.federated_user["unique_id"]) - self.assertEqual(user_ref.federated_users[0].display_name, - new_display_name) + self.federated_user["unique_id"], + ) + self.assertEqual( + user_ref.federated_users[0].display_name, new_display_name + ) self.assertEqual(user_dict_create["id"], user_ref.id) def test_set_last_active_at(self): - self.config_fixture.config(group='security_compliance', - disable_user_account_days_inactive=90) + self.config_fixture.config( + group='security_compliance', disable_user_account_days_inactive=90 + ) now = datetime.datetime.utcnow().date() password = uuid.uuid4().hex user = self._create_user(password) with self.make_request(): user_auth = PROVIDERS.identity_api.authenticate( - user_id=user['id'], - password=password) + user_id=user['id'], password=password + ) user_ref = self._get_user_ref(user_auth['id']) self.assertGreaterEqual(now, user_ref.last_active_at) def test_set_last_active_at_on_non_existing_user(self): - self.config_fixture.config(group='security_compliance', - disable_user_account_days_inactive=90) + self.config_fixture.config( + group='security_compliance', disable_user_account_days_inactive=90 + ) password = uuid.uuid4().hex user = self._create_user(password) @@ -146,23 +162,26 @@ class ShadowUsersBackendTests(object): test_self._delete_user(user_id) real_last_active_at(self, user_id) - with mock.patch.object(shadow_sql.ShadowUsers, 'set_last_active_at', - fake_last_active_at): + with mock.patch.object( + shadow_sql.ShadowUsers, 'set_last_active_at', fake_last_active_at + ): with self.make_request(): # the call is expected to just succeed without exceptions PROVIDERS.identity_api.authenticate( - user_id=user['id'], - password=password) + user_id=user['id'], password=password + ) def test_set_last_active_at_when_config_setting_is_none(self): - self.config_fixture.config(group='security_compliance', - 
disable_user_account_days_inactive=None) + self.config_fixture.config( + group='security_compliance', + disable_user_account_days_inactive=None, + ) password = uuid.uuid4().hex user = self._create_user(password) with self.make_request(): user_auth = PROVIDERS.identity_api.authenticate( - user_id=user['id'], - password=password) + user_id=user['id'], password=password + ) user_ref = self._get_user_ref(user_auth['id']) self.assertIsNone(user_ref.last_active_at) @@ -176,7 +195,7 @@ class ShadowUsersBackendTests(object): 'name': uuid.uuid4().hex, 'domain_id': self.domain_id, 'enabled': True, - 'password': password + 'password': password, } return PROVIDERS.identity_api.create_user(user) diff --git a/keystone/tests/unit/identity/shadow_users/test_core.py b/keystone/tests/unit/identity/shadow_users/test_core.py index 49934c6c5c..51c225488e 100644 --- a/keystone/tests/unit/identity/shadow_users/test_core.py +++ b/keystone/tests/unit/identity/shadow_users/test_core.py @@ -27,8 +27,10 @@ class ShadowUsersCoreTests(object): ) user = PROVIDERS.identity_api.shadow_federated_user( - self.federated_user['idp_id'], self.federated_user['protocol_id'], - federated_user1) + self.federated_user['idp_id'], + self.federated_user['protocol_id'], + federated_user1, + ) self.assertIsNotNone(user['id']) self.assertEqual(7, len(user.keys())) @@ -50,8 +52,10 @@ class ShadowUsersCoreTests(object): # introduce the user to keystone for the first time shadow_user1 = PROVIDERS.identity_api.shadow_federated_user( - self.federated_user['idp_id'], self.federated_user['protocol_id'], - federated_user1) + self.federated_user['idp_id'], + self.federated_user['protocol_id'], + federated_user1, + ) self.assertEqual(federated_user1['display_name'], shadow_user1['name']) @@ -65,8 +69,10 @@ class ShadowUsersCoreTests(object): ) shadow_user2 = PROVIDERS.identity_api.shadow_federated_user( - self.federated_user['idp_id'], self.federated_user['protocol_id'], - federated_user2) + self.federated_user['idp_id'], 
+ self.federated_user['protocol_id'], + federated_user2, + ) self.assertEqual(federated_user2['display_name'], shadow_user2['name']) self.assertNotEqual(shadow_user1['name'], shadow_user2['name']) @@ -80,8 +86,10 @@ class ShadowUsersCoreTests(object): ) PROVIDERS.identity_api.shadow_federated_user( - federated_user1['idp_id'], federated_user1['protocol_id'], - federated_user1) + federated_user1['idp_id'], + federated_user1['protocol_id'], + federated_user1, + ) hints = driver_hints.Hints() hints.add_filter('name', federated_user1['display_name']) @@ -96,8 +104,10 @@ class ShadowUsersCoreTests(object): federated_user2['email'] = "some_id_2@mail.provider" PROVIDERS.identity_api.shadow_federated_user( - federated_user2['idp_id'], federated_user2['protocol_id'], - federated_user2) + federated_user2['idp_id'], + federated_user2['protocol_id'], + federated_user2, + ) hints.add_filter('name', federated_user2['display_name']) users = PROVIDERS.identity_api.list_users(hints=hints) @@ -106,8 +116,9 @@ class ShadowUsersCoreTests(object): self.assertEqual(1, len(users)) @staticmethod - def normalize_federated_user_properties_for_test(federated_user, - email=None): + def normalize_federated_user_properties_for_test( + federated_user, email=None + ): federated_user['email'] = email federated_user['id'] = federated_user['unique_id'] federated_user['name'] = federated_user['display_name'] diff --git a/keystone/tests/unit/identity/test_backend_sql.py b/keystone/tests/unit/identity/test_backend_sql.py index 7cbc8f89e1..8be06d534e 100644 --- a/keystone/tests/unit/identity/test_backend_sql.py +++ b/keystone/tests/unit/identity/test_backend_sql.py @@ -35,8 +35,9 @@ PROVIDERS = provider_api.ProviderAPIs class UserPasswordCreatedAtIntTests(test_backend_sql.SqlTests): def config_overrides(self): super(UserPasswordCreatedAtIntTests, self).config_overrides() - self.config_fixture.config(group='security_compliance', - password_expires_days=1) + self.config_fixture.config( + 
group='security_compliance', password_expires_days=1 + ) def test_user_password_created_expired_at_int_matches_created_at(self): with sql.session_for_read() as session: @@ -45,21 +46,30 @@ class UserPasswordCreatedAtIntTests(test_backend_sql.SqlTests): ) self.assertIsNotNone(user_ref.password_ref._created_at) self.assertIsNotNone(user_ref.password_ref._expires_at) - self.assertEqual(user_ref.password_ref._created_at, - user_ref.password_ref.created_at_int) - self.assertEqual(user_ref.password_ref._expires_at, - user_ref.password_ref.expires_at_int) - self.assertEqual(user_ref.password_ref.created_at, - user_ref.password_ref.created_at_int) - self.assertEqual(user_ref.password_ref.expires_at, - user_ref.password_ref.expires_at_int) + self.assertEqual( + user_ref.password_ref._created_at, + user_ref.password_ref.created_at_int, + ) + self.assertEqual( + user_ref.password_ref._expires_at, + user_ref.password_ref.expires_at_int, + ) + self.assertEqual( + user_ref.password_ref.created_at, + user_ref.password_ref.created_at_int, + ) + self.assertEqual( + user_ref.password_ref.expires_at, + user_ref.password_ref.expires_at_int, + ) class UserPasswordHashingTestsNoCompat(test_backend_sql.SqlTests): def config_overrides(self): super(UserPasswordHashingTestsNoCompat, self).config_overrides() - self.config_fixture.config(group='identity', - password_hash_algorithm='scrypt') + self.config_fixture.config( + group='identity', password_hash_algorithm='scrypt' + ) def test_configured_algorithm_used(self): with sql.session_for_read() as session: @@ -68,7 +78,8 @@ class UserPasswordHashingTestsNoCompat(test_backend_sql.SqlTests): ) self.assertEqual( passlib.hash.scrypt, - password_hashing._get_hasher_from_ident(user_ref.password)) + password_hashing._get_hasher_from_ident(user_ref.password), + ) class UserResourceOptionTests(test_backend_sql.SqlTests): @@ -91,18 +102,22 @@ class UserResourceOptionTests(test_backend_sql.SqlTests): opt_value = uuid.uuid4().hex 
user['options'][self.option1.option_name] = opt_value new_ref = PROVIDERS.identity_api.update_user(user['id'], user) - self.assertEqual(opt_value, - new_ref['options'][self.option1.option_name]) + self.assertEqual( + opt_value, new_ref['options'][self.option1.option_name] + ) raw_ref = self._get_user_ref(user['id']) self.assertIn(self.option1.option_id, raw_ref._resource_option_mapper) self.assertEqual( opt_value, raw_ref._resource_option_mapper[ - self.option1.option_id].option_value) + self.option1.option_id + ].option_value, + ) api_get_ref = PROVIDERS.identity_api.get_user(user['id']) # Ensure options are properly set in a .get_user call. - self.assertEqual(opt_value, - api_get_ref['options'][self.option1.option_name]) + self.assertEqual( + opt_value, api_get_ref['options'][self.option1.option_name] + ) def test_user_add_update_delete_option_in_resource_option(self): user = self._create_user(self._get_user_dict()) @@ -113,14 +128,16 @@ class UserResourceOptionTests(test_backend_sql.SqlTests): # Update user to add the new value option user['options'][self.option1.option_name] = opt_value new_ref = PROVIDERS.identity_api.update_user(user['id'], user) - self.assertEqual(opt_value, - new_ref['options'][self.option1.option_name]) + self.assertEqual( + opt_value, new_ref['options'][self.option1.option_name] + ) # Update the option Value and confirm it is updated user['options'][self.option1.option_name] = new_opt_value new_ref = PROVIDERS.identity_api.update_user(user['id'], user) - self.assertEqual(new_opt_value, - new_ref['options'][self.option1.option_name]) + self.assertEqual( + new_opt_value, new_ref['options'][self.option1.option_name] + ) # Set the option value to None, meaning delete the option user['options'][self.option1.option_name] = None @@ -136,8 +153,9 @@ class UserResourceOptionTests(test_backend_sql.SqlTests): # Update user to add the new value option user['options'][self.option1.option_name] = opt_value new_ref = 
PROVIDERS.identity_api.update_user(user['id'], user) - self.assertEqual(opt_value, - new_ref['options'][self.option1.option_name]) + self.assertEqual( + opt_value, new_ref['options'][self.option1.option_name] + ) # Update the option value for option 2 and confirm it is updated and # option1's value remains the same. Option 1 is not specified in the @@ -145,34 +163,44 @@ class UserResourceOptionTests(test_backend_sql.SqlTests): del user['options'][self.option1.option_name] user['options'][self.option2.option_name] = opt2_value new_ref = PROVIDERS.identity_api.update_user(user['id'], user) - self.assertEqual(opt_value, - new_ref['options'][self.option1.option_name]) - self.assertEqual(opt2_value, - new_ref['options'][self.option2.option_name]) + self.assertEqual( + opt_value, new_ref['options'][self.option1.option_name] + ) + self.assertEqual( + opt2_value, new_ref['options'][self.option2.option_name] + ) raw_ref = self._get_user_ref(user['id']) self.assertEqual( opt_value, raw_ref._resource_option_mapper[ - self.option1.option_id].option_value) + self.option1.option_id + ].option_value, + ) self.assertEqual( opt2_value, raw_ref._resource_option_mapper[ - self.option2.option_id].option_value) + self.option2.option_id + ].option_value, + ) # Set the option value to None, meaning delete the option, ensure # option 2 still remains and has the right value user['options'][self.option1.option_name] = None new_ref = PROVIDERS.identity_api.update_user(user['id'], user) self.assertNotIn(self.option1.option_name, new_ref['options']) - self.assertEqual(opt2_value, - new_ref['options'][self.option2.option_name]) + self.assertEqual( + opt2_value, new_ref['options'][self.option2.option_name] + ) raw_ref = self._get_user_ref(user['id']) - self.assertNotIn(raw_ref._resource_option_mapper, - self.option1.option_id) + self.assertNotIn( + raw_ref._resource_option_mapper, self.option1.option_id + ) self.assertEqual( opt2_value, raw_ref._resource_option_mapper[ - 
self.option2.option_id].option_value) + self.option2.option_id + ].option_value, + ) def test_unregistered_resource_option_deleted(self): user = self._create_user(self._get_user_dict()) @@ -183,8 +211,9 @@ class UserResourceOptionTests(test_backend_sql.SqlTests): # Update user to add the new value option user['options'][self.option1.option_name] = opt_value new_ref = PROVIDERS.identity_api.update_user(user['id'], user) - self.assertEqual(opt_value, - new_ref['options'][self.option1.option_name]) + self.assertEqual( + opt_value, new_ref['options'][self.option1.option_name] + ) # Update the option value for option 2 and confirm it is updated and # option1's value remains the same. Option 1 is not specified in the @@ -192,19 +221,25 @@ class UserResourceOptionTests(test_backend_sql.SqlTests): del user['options'][self.option1.option_name] user['options'][self.option2.option_name] = opt2_value new_ref = PROVIDERS.identity_api.update_user(user['id'], user) - self.assertEqual(opt_value, - new_ref['options'][self.option1.option_name]) - self.assertEqual(opt2_value, - new_ref['options'][self.option2.option_name]) + self.assertEqual( + opt_value, new_ref['options'][self.option1.option_name] + ) + self.assertEqual( + opt2_value, new_ref['options'][self.option2.option_name] + ) raw_ref = self._get_user_ref(user['id']) self.assertEqual( opt_value, raw_ref._resource_option_mapper[ - self.option1.option_id].option_value) + self.option1.option_id + ].option_value, + ) self.assertEqual( opt2_value, raw_ref._resource_option_mapper[ - self.option2.option_id].option_value) + self.option2.option_id + ].option_value, + ) # clear registered options and only re-register option1, update user # and confirm option2 is gone from the ref and returned dict @@ -213,15 +248,19 @@ class UserResourceOptionTests(test_backend_sql.SqlTests): user['name'] = uuid.uuid4().hex new_ref = PROVIDERS.identity_api.update_user(user['id'], user) self.assertNotIn(self.option2.option_name, new_ref['options']) - 
self.assertEqual(opt_value, - new_ref['options'][self.option1.option_name]) + self.assertEqual( + opt_value, new_ref['options'][self.option1.option_name] + ) raw_ref = self._get_user_ref(user['id']) - self.assertNotIn(raw_ref._resource_option_mapper, - self.option2.option_id) + self.assertNotIn( + raw_ref._resource_option_mapper, self.option2.option_id + ) self.assertEqual( opt_value, raw_ref._resource_option_mapper[ - self.option1.option_id].option_value) + self.option1.option_id + ].option_value, + ) def _get_user_ref(self, user_id): with sql.session_for_read() as session: @@ -239,7 +278,7 @@ class UserResourceOptionTests(test_backend_sql.SqlTests): 'name': uuid.uuid4().hex, 'domain_id': CONF.identity.default_domain_id, 'enabled': True, - 'password': uuid.uuid4().hex + 'password': uuid.uuid4().hex, } return user @@ -252,19 +291,22 @@ class DisableInactiveUserTests(test_backend_sql.SqlTests): self.max_inactive_days = 90 self.config_fixture.config( group='security_compliance', - disable_user_account_days_inactive=self.max_inactive_days) + disable_user_account_days_inactive=self.max_inactive_days, + ) def test_authenticate_user_disabled_due_to_inactivity(self): # create user and set last_active_at beyond the max - last_active_at = ( - datetime.datetime.utcnow() - - datetime.timedelta(days=self.max_inactive_days + 1)) + last_active_at = datetime.datetime.utcnow() - datetime.timedelta( + days=self.max_inactive_days + 1 + ) user = self._create_user(self.user_dict, last_active_at.date()) with self.make_request(): - self.assertRaises(exception.UserDisabled, - PROVIDERS.identity_api.authenticate, - user_id=user['id'], - password=self.password) + self.assertRaises( + exception.UserDisabled, + PROVIDERS.identity_api.authenticate, + user_id=user['id'], + password=self.password, + ) # verify that the user is actually disabled user = PROVIDERS.identity_api.get_user(user['id']) self.assertFalse(user['enabled']) @@ -279,8 +321,9 @@ class 
DisableInactiveUserTests(test_backend_sql.SqlTests): def test_authenticate_user_not_disabled_due_to_inactivity(self): # create user and set last_active_at just below the max last_active_at = ( - datetime.datetime.utcnow() - - datetime.timedelta(days=self.max_inactive_days - 1)).date() + datetime.datetime.utcnow() + - datetime.timedelta(days=self.max_inactive_days - 1) + ).date() user = self._create_user(self.user_dict, last_active_at) with self.make_request(): user = PROVIDERS.identity_api.authenticate( @@ -292,8 +335,9 @@ class DisableInactiveUserTests(test_backend_sql.SqlTests): user = PROVIDERS.identity_api.create_user(self.user_dict) # set last_active_at just beyond the max last_active_at = ( - datetime.datetime.utcnow() - - datetime.timedelta(self.max_inactive_days + 1)).date() + datetime.datetime.utcnow() + - datetime.timedelta(self.max_inactive_days + 1) + ).date() self._update_user_last_active_at(user['id'], last_active_at) # get user and verify that the user is actually disabled user = PROVIDERS.identity_api.get_user(user['id']) @@ -309,16 +353,18 @@ class DisableInactiveUserTests(test_backend_sql.SqlTests): self.assertTrue(user['enabled']) # set last_active_at just below the max last_active_at = ( - datetime.datetime.utcnow() - - datetime.timedelta(self.max_inactive_days - 1)).date() + datetime.datetime.utcnow() + - datetime.timedelta(self.max_inactive_days - 1) + ).date() self._update_user_last_active_at(user['id'], last_active_at) # get user and verify that the user is still enabled user = PROVIDERS.identity_api.get_user(user['id']) self.assertTrue(user['enabled']) def test_enabled_after_create_update_user(self): - self.config_fixture.config(group='security_compliance', - disable_user_account_days_inactive=90) + self.config_fixture.config( + group='security_compliance', disable_user_account_days_inactive=90 + ) # create user without enabled; assert enabled del self.user_dict['enabled'] user = PROVIDERS.identity_api.create_user(self.user_dict) @@ -344,32 
+390,31 @@ class DisableInactiveUserTests(test_backend_sql.SqlTests): def test_ignore_user_inactivity(self): self.user_dict['options'] = {'ignore_user_inactivity': True} - user = PROVIDERS.identity_api.create_user( - self.user_dict) + user = PROVIDERS.identity_api.create_user(self.user_dict) # set last_active_at just beyond the max last_active_at = ( - datetime.datetime.utcnow() - - datetime.timedelta(self.max_inactive_days + 1)).date() + datetime.datetime.utcnow() + - datetime.timedelta(self.max_inactive_days + 1) + ).date() self._update_user_last_active_at(user['id'], last_active_at) # get user and verify that the user is not disabled user = PROVIDERS.identity_api.get_user(user['id']) self.assertTrue(user['enabled']) def test_ignore_user_inactivity_with_user_disabled(self): - user = PROVIDERS.identity_api.create_user( - self.user_dict) + user = PROVIDERS.identity_api.create_user(self.user_dict) # set last_active_at just beyond the max last_active_at = ( - datetime.datetime.utcnow() - - datetime.timedelta(self.max_inactive_days + 1)).date() + datetime.datetime.utcnow() + - datetime.timedelta(self.max_inactive_days + 1) + ).date() self._update_user_last_active_at(user['id'], last_active_at) # get user and verify that the user is disabled user = PROVIDERS.identity_api.get_user(user['id']) self.assertFalse(user['enabled']) # update disabled user with ignore_user_inactivity to true user['options'] = {'ignore_user_inactivity': True} - user = PROVIDERS.identity_api.update_user( - user['id'], user) + user = PROVIDERS.identity_api.update_user(user['id'], user) # user is not enabled user = PROVIDERS.identity_api.get_user(user['id']) self.assertFalse(user['enabled']) @@ -384,7 +429,7 @@ class DisableInactiveUserTests(test_backend_sql.SqlTests): 'name': uuid.uuid4().hex, 'domain_id': CONF.identity.default_domain_id, 'enabled': True, - 'password': password + 'password': password, } return user @@ -411,32 +456,42 @@ class 
PasswordHistoryValidationTests(test_backend_sql.SqlTests): def setUp(self): super(PasswordHistoryValidationTests, self).setUp() self.max_cnt = 3 - self.config_fixture.config(group='security_compliance', - unique_last_password_count=self.max_cnt) + self.config_fixture.config( + group='security_compliance', + unique_last_password_count=self.max_cnt, + ) def test_validate_password_history_with_invalid_password(self): password = uuid.uuid4().hex user = self._create_user(password) # Attempt to change to the same password with self.make_request(): - self.assertRaises(exception.PasswordValidationError, - PROVIDERS.identity_api.change_password, - user_id=user['id'], - original_password=password, - new_password=password) + self.assertRaises( + exception.PasswordValidationError, + PROVIDERS.identity_api.change_password, + user_id=user['id'], + original_password=password, + new_password=password, + ) # Attempt to change to a unique password new_password = uuid.uuid4().hex self.assertValidChangePassword(user['id'], password, new_password) # Attempt to change back to the initial password - self.assertRaises(exception.PasswordValidationError, - PROVIDERS.identity_api.change_password, - user_id=user['id'], - original_password=new_password, - new_password=password) + self.assertRaises( + exception.PasswordValidationError, + PROVIDERS.identity_api.change_password, + user_id=user['id'], + original_password=new_password, + new_password=password, + ) def test_validate_password_history_with_valid_password(self): - passwords = [uuid.uuid4().hex, uuid.uuid4().hex, uuid.uuid4().hex, - uuid.uuid4().hex] + passwords = [ + uuid.uuid4().hex, + uuid.uuid4().hex, + uuid.uuid4().hex, + uuid.uuid4().hex, + ] user = self._create_user(passwords[0]) self.assertValidChangePassword(user['id'], passwords[0], passwords[1]) self.assertValidChangePassword(user['id'], passwords[1], passwords[2]) @@ -447,8 +502,9 @@ class PasswordHistoryValidationTests(test_backend_sql.SqlTests): 
self.assertValidChangePassword(user['id'], passwords[3], passwords[0]) def test_validate_password_history_with_valid_password_only_once(self): - self.config_fixture.config(group='security_compliance', - unique_last_password_count=1) + self.config_fixture.config( + group='security_compliance', unique_last_password_count=1 + ) passwords = [uuid.uuid4().hex, uuid.uuid4().hex] user = self._create_user(passwords[0]) self.assertValidChangePassword(user['id'], passwords[0], passwords[1]) @@ -467,15 +523,18 @@ class PasswordHistoryValidationTests(test_backend_sql.SqlTests): self.assertValidChangePassword(user['id'], passwords[0], passwords[1]) # Attempt to update with a previous password with self.make_request(): - self.assertRaises(exception.PasswordValidationError, - PROVIDERS.identity_api.change_password, - user_id=user['id'], - original_password=passwords[1], - new_password=passwords[0]) + self.assertRaises( + exception.PasswordValidationError, + PROVIDERS.identity_api.change_password, + user_id=user['id'], + original_password=passwords[1], + new_password=passwords[0], + ) def test_disable_password_history_and_repeat_same_password(self): - self.config_fixture.config(group='security_compliance', - unique_last_password_count=0) + self.config_fixture.config( + group='security_compliance', unique_last_password_count=0 + ) password = uuid.uuid4().hex user = self._create_user(password) # Repeatedly change password with the same password @@ -510,28 +569,36 @@ class PasswordHistoryValidationTests(test_backend_sql.SqlTests): self._add_passwords_to_history(user, n=4) user_ref = self._get_user_ref(user['id']) self.assertEqual( - len(user_ref.local_user.passwords), (self.max_cnt + 1)) + len(user_ref.local_user.passwords), (self.max_cnt + 1) + ) def test_truncate_passwords_when_max_is_default(self): self.max_cnt = 1 expected_length = self.max_cnt + 1 - self.config_fixture.config(group='security_compliance', - unique_last_password_count=self.max_cnt) + self.config_fixture.config( + 
group='security_compliance', + unique_last_password_count=self.max_cnt, + ) user = self._create_user(uuid.uuid4().hex) self._add_passwords_to_history(user, n=4) user_ref = self._get_user_ref(user['id']) self.assertEqual(len(user_ref.local_user.passwords), expected_length) # Start with multiple passwords and then change max_cnt to one self.max_cnt = 4 - self.config_fixture.config(group='security_compliance', - unique_last_password_count=self.max_cnt) + self.config_fixture.config( + group='security_compliance', + unique_last_password_count=self.max_cnt, + ) self._add_passwords_to_history(user, n=self.max_cnt) user_ref = self._get_user_ref(user['id']) self.assertEqual( - len(user_ref.local_user.passwords), (self.max_cnt + 1)) + len(user_ref.local_user.passwords), (self.max_cnt + 1) + ) self.max_cnt = 1 - self.config_fixture.config(group='security_compliance', - unique_last_password_count=self.max_cnt) + self.config_fixture.config( + group='security_compliance', + unique_last_password_count=self.max_cnt, + ) self._add_passwords_to_history(user, n=1) user_ref = self._get_user_ref(user['id']) self.assertEqual(len(user_ref.local_user.passwords), expected_length) @@ -539,8 +606,10 @@ class PasswordHistoryValidationTests(test_backend_sql.SqlTests): def test_truncate_passwords_when_max_is_default_and_no_password(self): expected_length = 1 self.max_cnt = 1 - self.config_fixture.config(group='security_compliance', - unique_last_password_count=self.max_cnt) + self.config_fixture.config( + group='security_compliance', + unique_last_password_count=self.max_cnt, + ) user = { 'name': uuid.uuid4().hex, 'domain_id': 'default', @@ -556,15 +625,16 @@ class PasswordHistoryValidationTests(test_backend_sql.SqlTests): 'name': uuid.uuid4().hex, 'domain_id': 'default', 'enabled': True, - 'password': password + 'password': password, } return PROVIDERS.identity_api.create_user(user) def assertValidChangePassword(self, user_id, password, new_password): with self.make_request(): 
PROVIDERS.identity_api.change_password( - user_id=user_id, original_password=password, - new_password=new_password + user_id=user_id, + original_password=password, + new_password=new_password, ) PROVIDERS.identity_api.authenticate( user_id=user_id, password=new_password @@ -584,39 +654,42 @@ class LockingOutUserTests(test_backend_sql.SqlTests): def setUp(self): super(LockingOutUserTests, self).setUp() self.config_fixture.config( - group='security_compliance', - lockout_failure_attempts=6) + group='security_compliance', lockout_failure_attempts=6 + ) self.config_fixture.config( - group='security_compliance', - lockout_duration=5) + group='security_compliance', lockout_duration=5 + ) # create user self.password = uuid.uuid4().hex user_dict = { 'name': uuid.uuid4().hex, 'domain_id': CONF.identity.default_domain_id, 'enabled': True, - 'password': self.password + 'password': self.password, } self.user = PROVIDERS.identity_api.create_user(user_dict) def test_locking_out_user_after_max_failed_attempts(self): with self.make_request(): # authenticate with wrong password - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=self.user['id'], - password=uuid.uuid4().hex) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=self.user['id'], + password=uuid.uuid4().hex, + ) # authenticate with correct password PROVIDERS.identity_api.authenticate( - user_id=self.user['id'], - password=self.password + user_id=self.user['id'], password=self.password ) # test locking out user after max failed attempts self._fail_auth_repeatedly(self.user['id']) - self.assertRaises(exception.Unauthorized, - PROVIDERS.identity_api.authenticate, - user_id=self.user['id'], - password=uuid.uuid4().hex) + self.assertRaises( + exception.Unauthorized, + PROVIDERS.identity_api.authenticate, + user_id=self.user['id'], + password=uuid.uuid4().hex, + ) def test_lock_out_for_ignored_user(self): # mark the user as exempt from failed password 
attempts @@ -628,30 +701,32 @@ class LockingOutUserTests(test_backend_sql.SqlTests): self._fail_auth_repeatedly(self.user['id']) # authenticate with wrong password, account should not be locked with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=self.user['id'], - password=uuid.uuid4().hex) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=self.user['id'], + password=uuid.uuid4().hex, + ) # authenticate with correct password, account should not be locked PROVIDERS.identity_api.authenticate( - user_id=self.user['id'], - password=self.password + user_id=self.user['id'], password=self.password ) def test_set_enabled_unlocks_user(self): with self.make_request(): # lockout user self._fail_auth_repeatedly(self.user['id']) - self.assertRaises(exception.Unauthorized, - PROVIDERS.identity_api.authenticate, - user_id=self.user['id'], - password=uuid.uuid4().hex) + self.assertRaises( + exception.Unauthorized, + PROVIDERS.identity_api.authenticate, + user_id=self.user['id'], + password=uuid.uuid4().hex, + ) # set enabled, user should be unlocked self.user['enabled'] = True PROVIDERS.identity_api.update_user(self.user['id'], self.user) user_ret = PROVIDERS.identity_api.authenticate( - user_id=self.user['id'], - password=self.password + user_id=self.user['id'], password=self.password ) self.assertTrue(user_ret['enabled']) @@ -661,25 +736,31 @@ class LockingOutUserTests(test_backend_sql.SqlTests): with self.make_request(): # lockout user self._fail_auth_repeatedly(self.user['id']) - self.assertRaises(exception.Unauthorized, - PROVIDERS.identity_api.authenticate, - user_id=self.user['id'], - password=uuid.uuid4().hex) + self.assertRaises( + exception.Unauthorized, + PROVIDERS.identity_api.authenticate, + user_id=self.user['id'], + password=uuid.uuid4().hex, + ) # freeze time past the duration, user should be unlocked and # failed auth count should get reset - 
frozen_time.tick(delta=datetime.timedelta( - seconds=CONF.security_compliance.lockout_duration + 1)) + frozen_time.tick( + delta=datetime.timedelta( + seconds=CONF.security_compliance.lockout_duration + 1 + ) + ) PROVIDERS.identity_api.authenticate( - user_id=self.user['id'], - password=self.password + user_id=self.user['id'], password=self.password ) # test failed auth count was reset by authenticating with the # wrong password, should raise an assertion error and not # account locked - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=self.user['id'], - password=uuid.uuid4().hex) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=self.user['id'], + password=uuid.uuid4().hex, + ) def test_lockout_duration_failed_auth_cnt_resets(self): # freeze time @@ -687,29 +768,38 @@ class LockingOutUserTests(test_backend_sql.SqlTests): with self.make_request(): # lockout user self._fail_auth_repeatedly(self.user['id']) - self.assertRaises(exception.Unauthorized, - PROVIDERS.identity_api.authenticate, - user_id=self.user['id'], - password=uuid.uuid4().hex) + self.assertRaises( + exception.Unauthorized, + PROVIDERS.identity_api.authenticate, + user_id=self.user['id'], + password=uuid.uuid4().hex, + ) # freeze time past the duration, failed_auth_cnt should reset - frozen_time.tick(delta=datetime.timedelta( - seconds=CONF.security_compliance.lockout_duration + 1)) + frozen_time.tick( + delta=datetime.timedelta( + seconds=CONF.security_compliance.lockout_duration + 1 + ) + ) # repeat failed auth the max times self._fail_auth_repeatedly(self.user['id']) # test user account is locked - self.assertRaises(exception.Unauthorized, - PROVIDERS.identity_api.authenticate, - user_id=self.user['id'], - password=uuid.uuid4().hex) + self.assertRaises( + exception.Unauthorized, + PROVIDERS.identity_api.authenticate, + user_id=self.user['id'], + password=uuid.uuid4().hex, + ) def _fail_auth_repeatedly(self, user_id): 
wrong_password = uuid.uuid4().hex for _ in range(CONF.security_compliance.lockout_failure_attempts): with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=user_id, - password=wrong_password) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=user_id, + password=wrong_password, + ) class PasswordExpiresValidationTests(test_backend_sql.SqlTests): @@ -718,30 +808,28 @@ class PasswordExpiresValidationTests(test_backend_sql.SqlTests): self.password = uuid.uuid4().hex self.user_dict = self._get_test_user_dict(self.password) self.config_fixture.config( - group='security_compliance', - password_expires_days=90) + group='security_compliance', password_expires_days=90 + ) def test_authenticate_with_expired_password(self): # set password created_at so that the password will expire - password_created_at = ( - datetime.datetime.utcnow() - - datetime.timedelta( - days=CONF.security_compliance.password_expires_days + 1) + password_created_at = datetime.datetime.utcnow() - datetime.timedelta( + days=CONF.security_compliance.password_expires_days + 1 ) user = self._create_user(self.user_dict, password_created_at) # test password is expired with self.make_request(): - self.assertRaises(exception.PasswordExpired, - PROVIDERS.identity_api.authenticate, - user_id=user['id'], - password=self.password) + self.assertRaises( + exception.PasswordExpired, + PROVIDERS.identity_api.authenticate, + user_id=user['id'], + password=self.password, + ) def test_authenticate_with_non_expired_password(self): # set password created_at so that the password will not expire - password_created_at = ( - datetime.datetime.utcnow() - - datetime.timedelta( - days=CONF.security_compliance.password_expires_days - 1) + password_created_at = datetime.datetime.utcnow() - datetime.timedelta( + days=CONF.security_compliance.password_expires_days - 1 ) user = self._create_user(self.user_dict, password_created_at) # test 
password is not expired @@ -753,26 +841,24 @@ class PasswordExpiresValidationTests(test_backend_sql.SqlTests): def test_authenticate_with_expired_password_for_ignore_user_option(self): # set user to have the 'ignore_password_expiry' option set to False self.user_dict.setdefault('options', {})[ - iro.IGNORE_PASSWORD_EXPIRY_OPT.option_name] = False + iro.IGNORE_PASSWORD_EXPIRY_OPT.option_name + ] = False # set password created_at so that the password will expire - password_created_at = ( - datetime.datetime.utcnow() - - datetime.timedelta( - days=CONF.security_compliance.password_expires_days + 1) + password_created_at = datetime.datetime.utcnow() - datetime.timedelta( + days=CONF.security_compliance.password_expires_days + 1 ) user = self._create_user(self.user_dict, password_created_at) with self.make_request(): - self.assertRaises(exception.PasswordExpired, - PROVIDERS.identity_api.authenticate, - user_id=user['id'], - password=self.password) + self.assertRaises( + exception.PasswordExpired, + PROVIDERS.identity_api.authenticate, + user_id=user['id'], + password=self.password, + ) # update user to explicitly have the expiry option to True - user['options'][ - iro.IGNORE_PASSWORD_EXPIRY_OPT.option_name] = True - user = PROVIDERS.identity_api.update_user( - user['id'], user - ) + user['options'][iro.IGNORE_PASSWORD_EXPIRY_OPT.option_name] = True + user = PROVIDERS.identity_api.update_user(user['id'], user) # test password is not expired due to ignore option PROVIDERS.identity_api.authenticate( user_id=user['id'], password=self.password @@ -784,7 +870,7 @@ class PasswordExpiresValidationTests(test_backend_sql.SqlTests): 'name': uuid.uuid4().hex, 'domain_id': CONF.identity.default_domain_id, 'enabled': True, - 'password': password + 'password': password, } return test_user_dict @@ -797,7 +883,8 @@ class PasswordExpiresValidationTests(test_backend_sql.SqlTests): user_ref = session.get(model.User, user_dict['id']) user_ref.password_ref.created_at = password_created_at 
user_ref.password_ref.expires_at = ( - user_ref._get_password_expires_at(password_created_at)) + user_ref._get_password_expires_at(password_created_at) + ) return base.filter_user(user_ref.to_dict()) @@ -805,65 +892,74 @@ class MinimumPasswordAgeTests(test_backend_sql.SqlTests): def setUp(self): super(MinimumPasswordAgeTests, self).setUp() self.config_fixture.config( - group='security_compliance', - minimum_password_age=1) + group='security_compliance', minimum_password_age=1 + ) self.initial_password = uuid.uuid4().hex self.user = self._create_new_user(self.initial_password) def test_user_cannot_change_password_before_min_age(self): # user can change password after create new_password = uuid.uuid4().hex - self.assertValidChangePassword(self.user['id'], self.initial_password, - new_password) + self.assertValidChangePassword( + self.user['id'], self.initial_password, new_password + ) # user cannot change password before min age with self.make_request(): - self.assertRaises(exception.PasswordAgeValidationError, - PROVIDERS.identity_api.change_password, - user_id=self.user['id'], - original_password=new_password, - new_password=uuid.uuid4().hex) + self.assertRaises( + exception.PasswordAgeValidationError, + PROVIDERS.identity_api.change_password, + user_id=self.user['id'], + original_password=new_password, + new_password=uuid.uuid4().hex, + ) def test_user_can_change_password_after_min_age(self): # user can change password after create new_password = uuid.uuid4().hex - self.assertValidChangePassword(self.user['id'], self.initial_password, - new_password) + self.assertValidChangePassword( + self.user['id'], self.initial_password, new_password + ) # set password_created_at so that the min password age has past - password_created_at = ( - datetime.datetime.utcnow() - - datetime.timedelta( - days=CONF.security_compliance.minimum_password_age + 1)) + password_created_at = datetime.datetime.utcnow() - datetime.timedelta( + days=CONF.security_compliance.minimum_password_age 
+ 1 + ) self._update_password_created_at(self.user['id'], password_created_at) # user can change their password after min password age has past - self.assertValidChangePassword(self.user['id'], new_password, - uuid.uuid4().hex) + self.assertValidChangePassword( + self.user['id'], new_password, uuid.uuid4().hex + ) def test_user_can_change_password_after_admin_reset(self): # user can change password after create new_password = uuid.uuid4().hex - self.assertValidChangePassword(self.user['id'], self.initial_password, - new_password) + self.assertValidChangePassword( + self.user['id'], self.initial_password, new_password + ) # user cannot change password before min age with self.make_request(): - self.assertRaises(exception.PasswordAgeValidationError, - PROVIDERS.identity_api.change_password, - user_id=self.user['id'], - original_password=new_password, - new_password=uuid.uuid4().hex) + self.assertRaises( + exception.PasswordAgeValidationError, + PROVIDERS.identity_api.change_password, + user_id=self.user['id'], + original_password=new_password, + new_password=uuid.uuid4().hex, + ) # admin reset new_password = uuid.uuid4().hex self.user['password'] = new_password PROVIDERS.identity_api.update_user(self.user['id'], self.user) # user can change password after admin reset - self.assertValidChangePassword(self.user['id'], new_password, - uuid.uuid4().hex) + self.assertValidChangePassword( + self.user['id'], new_password, uuid.uuid4().hex + ) def assertValidChangePassword(self, user_id, password, new_password): with self.make_request(): PROVIDERS.identity_api.change_password( - user_id=user_id, original_password=password, - new_password=new_password + user_id=user_id, + original_password=password, + new_password=new_password, ) PROVIDERS.identity_api.authenticate( user_id=user_id, password=new_password @@ -874,7 +970,7 @@ class MinimumPasswordAgeTests(test_backend_sql.SqlTests): 'name': uuid.uuid4().hex, 'domain_id': CONF.identity.default_domain_id, 'enabled': True, - 
'password': password + 'password': password, } return PROVIDERS.identity_api.create_user(user) @@ -899,21 +995,24 @@ class ChangePasswordRequiredAfterFirstUse(test_backend_sql.SqlTests): def _create_user(self, password, change_password_upon_first_use): self.config_fixture.config( group='security_compliance', - change_password_upon_first_use=change_password_upon_first_use) + change_password_upon_first_use=change_password_upon_first_use, + ) user_dict = { 'name': uuid.uuid4().hex, 'domain_id': CONF.identity.default_domain_id, 'enabled': True, - 'password': password + 'password': password, } return PROVIDERS.identity_api.create_user(user_dict) def assertPasswordIsExpired(self, user_id, password): with self.make_request(): - self.assertRaises(exception.PasswordExpired, - PROVIDERS.identity_api.authenticate, - user_id=user_id, - password=password) + self.assertRaises( + exception.PasswordExpired, + PROVIDERS.identity_api.authenticate, + user_id=user_id, + password=password, + ) def assertPasswordIsNotExpired(self, user_id, password): with self.make_request(): @@ -941,8 +1040,8 @@ class ChangePasswordRequiredAfterFirstUse(test_backend_sql.SqlTests): self.assertPasswordIsNotExpired(user['id'], initial_password) # enable change_password_upon_first_use self.config_fixture.config( - group='security_compliance', - change_password_upon_first_use=True) + group='security_compliance', change_password_upon_first_use=True + ) # admin reset, password expired admin_password = uuid.uuid4().hex user['password'] = admin_password @@ -974,8 +1073,8 @@ class ChangePasswordRequiredAfterFirstUse(test_backend_sql.SqlTests): self.assertPasswordIsNotExpired(user['id'], initial_password) # enable change_password_upon_first_use self.config_fixture.config( - group='security_compliance', - change_password_upon_first_use=True) + group='security_compliance', change_password_upon_first_use=True + ) # ignore user and reset password, password not expired 
user['options'][iro.IGNORE_CHANGE_PASSWORD_OPT.option_name] = True admin_password = uuid.uuid4().hex diff --git a/keystone/tests/unit/identity/test_backends.py b/keystone/tests/unit/identity/test_backends.py index 97d0863cdc..5b061ea515 100644 --- a/keystone/tests/unit/identity/test_backends.py +++ b/keystone/tests/unit/identity/test_backends.py @@ -43,23 +43,27 @@ class IdentityTests(object): def test_authenticate_bad_user(self): with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=uuid.uuid4().hex, - password=self.user_foo['password']) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=uuid.uuid4().hex, + password=self.user_foo['password'], + ) def test_authenticate_bad_password(self): with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=self.user_foo['id'], - password=uuid.uuid4().hex) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=self.user_foo['id'], + password=uuid.uuid4().hex, + ) def test_authenticate(self): with self.make_request(): user_ref = PROVIDERS.identity_api.authenticate( - user_id=self.user_sna['id'], - password=self.user_sna['password']) + user_id=self.user_sna['id'], password=self.user_sna['password'] + ) # NOTE(termie): the password field is left in user_sna to make # it easier to authenticate in tests, but should # not be returned by the api @@ -84,8 +88,8 @@ class IdentityTests(object): ) with self.make_request(): user_ref = PROVIDERS.identity_api.authenticate( - user_id=new_user['id'], - password=user['password']) + user_id=new_user['id'], password=user['password'] + ) self.assertNotIn('password', user_ref) # NOTE(termie): the password field is left in user_sna to make # it easier to authenticate in tests, but should @@ -93,7 +97,8 @@ class IdentityTests(object): user.pop('password') self.assertLessEqual(user.items(), user_ref.items()) role_list = 
PROVIDERS.assignment_api.get_roles_for_user_and_project( - new_user['id'], self.project_baz['id']) + new_user['id'], self.project_baz['id'] + ) self.assertEqual(1, len(role_list)) self.assertIn(role_member['id'], role_list) @@ -103,15 +108,18 @@ class IdentityTests(object): PROVIDERS.identity_api.create_user(user) with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=id_, - password='password') + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=id_, + password='password', + ) def test_create_unicode_user_name(self): unicode_name = u'name \u540d\u5b57' - user = unit.new_user_ref(name=unicode_name, - domain_id=CONF.identity.default_domain_id) + user = unit.new_user_ref( + name=unicode_name, domain_id=CONF.identity.default_domain_id + ) ref = PROVIDERS.identity_api.create_user(user) self.assertEqual(unicode_name, ref['name']) @@ -145,15 +153,17 @@ class IdentityTests(object): PROVIDERS.identity_api.get_user(ref['id']) # delete bypassing identity api domain_id, driver, entity_id = ( - PROVIDERS.identity_api._get_domain_driver_and_entity_id(ref['id'])) + PROVIDERS.identity_api._get_domain_driver_and_entity_id(ref['id']) + ) driver.delete_user(entity_id) self.assertDictEqual(ref, PROVIDERS.identity_api.get_user(ref['id'])) PROVIDERS.identity_api.get_user.invalidate( PROVIDERS.identity_api, ref['id'] ) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, ref['id']) + self.assertRaises( + exception.UserNotFound, PROVIDERS.identity_api.get_user, ref['id'] + ) user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user = PROVIDERS.identity_api.create_user(user) ref = PROVIDERS.identity_api.get_user_by_name( @@ -166,22 +176,26 @@ class IdentityTests(object): user_updated = PROVIDERS.identity_api.update_user(ref['id'], user) self.assertLessEqual( PROVIDERS.identity_api.get_user(ref['id']).items(), - user_updated.items() + user_updated.items(), ) 
self.assertLessEqual( PROVIDERS.identity_api.get_user_by_name( - ref['name'], ref['domain_id']).items(), - user_updated.items() + ref['name'], ref['domain_id'] + ).items(), + user_updated.items(), ) def test_get_user_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, - uuid.uuid4().hex) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.get_user, + uuid.uuid4().hex, + ) def test_get_user_by_name(self): user_ref = PROVIDERS.identity_api.get_user_by_name( - self.user_foo['name'], CONF.identity.default_domain_id) + self.user_foo['name'], CONF.identity.default_domain_id + ) # NOTE(termie): the password field is left in user_foo to make # it easier to authenticate in tests, but should # not be returned by the api @@ -197,19 +211,27 @@ class IdentityTests(object): ) # delete bypassing the identity api. domain_id, driver, entity_id = ( - PROVIDERS.identity_api._get_domain_driver_and_entity_id(ref['id'])) + PROVIDERS.identity_api._get_domain_driver_and_entity_id(ref['id']) + ) driver.delete_user(entity_id) - self.assertDictEqual(ref, PROVIDERS.identity_api.get_user_by_name( - user['name'], CONF.identity.default_domain_id)) + self.assertDictEqual( + ref, + PROVIDERS.identity_api.get_user_by_name( + user['name'], CONF.identity.default_domain_id + ), + ) PROVIDERS.identity_api.get_user_by_name.invalidate( PROVIDERS.identity_api, user['name'], - CONF.identity.default_domain_id + CONF.identity.default_domain_id, + ) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.get_user_by_name, + user['name'], + CONF.identity.default_domain_id, ) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user_by_name, - user['name'], CONF.identity.default_domain_id) user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user = PROVIDERS.identity_api.create_user(user) ref = PROVIDERS.identity_api.get_user_by_name( @@ -219,36 +241,38 @@ class IdentityTests(object): 
user_updated = PROVIDERS.identity_api.update_user(ref['id'], user) self.assertLessEqual( PROVIDERS.identity_api.get_user(ref['id']).items(), - user_updated.items() + user_updated.items(), ) self.assertLessEqual( PROVIDERS.identity_api.get_user_by_name( - ref['name'], - ref['domain_id'] + ref['name'], ref['domain_id'] ).items(), - user_updated.items() + user_updated.items(), ) def test_get_user_by_name_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user_by_name, - uuid.uuid4().hex, - CONF.identity.default_domain_id) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.get_user_by_name, + uuid.uuid4().hex, + CONF.identity.default_domain_id, + ) def test_create_duplicate_user_name_fails(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user = PROVIDERS.identity_api.create_user(user) - self.assertRaises(exception.Conflict, - PROVIDERS.identity_api.create_user, - user) + self.assertRaises( + exception.Conflict, PROVIDERS.identity_api.create_user, user + ) def test_create_duplicate_user_name_in_different_domains(self): new_domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain) user1 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user2 = unit.new_user_ref(name=user1['name'], - domain_id=new_domain['id']) + user2 = unit.new_user_ref( + name=user1['name'], domain_id=new_domain['id'] + ) PROVIDERS.identity_api.create_user(user1) PROVIDERS.identity_api.create_user(user2) @@ -261,8 +285,12 @@ class IdentityTests(object): user = unit.new_user_ref(domain_id=domain1['id']) user = PROVIDERS.identity_api.create_user(user) user['domain_id'] = domain2['id'] - self.assertRaises(exception.ValidationError, - PROVIDERS.identity_api.update_user, user['id'], user) + self.assertRaises( + exception.ValidationError, + PROVIDERS.identity_api.update_user, + user['id'], + user, + ) def test_rename_duplicate_user_name_fails(self): user1 = 
unit.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -270,25 +298,29 @@ class IdentityTests(object): PROVIDERS.identity_api.create_user(user1) user2 = PROVIDERS.identity_api.create_user(user2) user2['name'] = user1['name'] - self.assertRaises(exception.Conflict, - PROVIDERS.identity_api.update_user, - user2['id'], - user2) + self.assertRaises( + exception.Conflict, + PROVIDERS.identity_api.update_user, + user2['id'], + user2, + ) def test_update_user_id_fails(self): user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) user = PROVIDERS.identity_api.create_user(user) original_id = user['id'] user['id'] = 'fake2' - self.assertRaises(exception.ValidationError, - PROVIDERS.identity_api.update_user, - original_id, - user) + self.assertRaises( + exception.ValidationError, + PROVIDERS.identity_api.update_user, + original_id, + user, + ) user_ref = PROVIDERS.identity_api.get_user(original_id) self.assertEqual(original_id, user_ref['id']) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, - 'fake2') + self.assertRaises( + exception.UserNotFound, PROVIDERS.identity_api.get_user, 'fake2' + ) def test_delete_user_with_group_project_domain_links(self): role1 = unit.new_role_ref() @@ -311,21 +343,23 @@ class IdentityTests(object): user_id=user1['id'], group_id=group1['id'] ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) + user_id=user1['id'], project_id=project1['id'] + ) self.assertEqual(1, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) + user_id=user1['id'], domain_id=domain1['id'] + ) self.assertEqual(1, len(roles_ref)) PROVIDERS.identity_api.check_user_in_group( - user_id=user1['id'], - group_id=group1['id']) + user_id=user1['id'], group_id=group1['id'] + ) PROVIDERS.identity_api.delete_user(user1['id']) - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.check_user_in_group, - 
user1['id'], - group1['id']) + self.assertRaises( + exception.NotFound, + PROVIDERS.identity_api.check_user_in_group, + user1['id'], + group1['id'], + ) def test_delete_group_with_user_project_domain_links(self): role1 = unit.new_role_ref() @@ -340,8 +374,9 @@ class IdentityTests(object): group1 = PROVIDERS.identity_api.create_group(group1) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=project1['id'], - role_id=role1['id'] + group_id=group1['id'], + project_id=project1['id'], + role_id=role1['id'], ) PROVIDERS.assignment_api.create_grant( group_id=group1['id'], domain_id=domain1['id'], role_id=role1['id'] @@ -350,35 +385,39 @@ class IdentityTests(object): user_id=user1['id'], group_id=group1['id'] ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) + group_id=group1['id'], project_id=project1['id'] + ) self.assertEqual(1, len(roles_ref)) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) + group_id=group1['id'], domain_id=domain1['id'] + ) self.assertEqual(1, len(roles_ref)) PROVIDERS.identity_api.check_user_in_group( - user_id=user1['id'], - group_id=group1['id']) + user_id=user1['id'], group_id=group1['id'] + ) PROVIDERS.identity_api.delete_group(group1['id']) PROVIDERS.identity_api.get_user(user1['id']) def test_update_user_returns_not_found(self): user_id = uuid.uuid4().hex - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.update_user, - user_id, - {'id': user_id, - 'domain_id': CONF.identity.default_domain_id}) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.update_user, + user_id, + {'id': user_id, 'domain_id': CONF.identity.default_domain_id}, + ) def test_delete_user_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.delete_user, - uuid.uuid4().hex) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.delete_user, + 
uuid.uuid4().hex, + ) def test_create_user_with_long_password(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id, - password='a' * 2000) + user = unit.new_user_ref( + domain_id=CONF.identity.default_domain_id, password='a' * 2000 + ) # success create a user with long password PROVIDERS.identity_api.create_user(user) @@ -389,49 +428,60 @@ class IdentityTests(object): # Make sure the user is not allowed to login # with a password that is empty string or None with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=user['id'], - password='') - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=user['id'], - password=None) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=user['id'], + password='', + ) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=user['id'], + password=None, + ) def test_create_user_none_password(self): - user = unit.new_user_ref(password=None, - domain_id=CONF.identity.default_domain_id) + user = unit.new_user_ref( + password=None, domain_id=CONF.identity.default_domain_id + ) user = PROVIDERS.identity_api.create_user(user) PROVIDERS.identity_api.get_user(user['id']) # Make sure the user is not allowed to login # with a password that is empty string or None with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=user['id'], - password='') - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=user['id'], - password=None) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=user['id'], + password='', + ) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=user['id'], + password=None, + ) def test_list_users(self): users = PROVIDERS.identity_api.list_users( domain_scope=self._set_domain_scope( - 
CONF.identity.default_domain_id)) + CONF.identity.default_domain_id + ) + ) self.assertEqual(len(default_fixtures.USERS), len(users)) user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 'user_%s' % user['name'])['id'] - for user in default_fixtures.USERS) + expected_user_ids = set( + getattr(self, 'user_%s' % user['name'])['id'] + for user in default_fixtures.USERS + ) for user_ref in users: self.assertNotIn('password', user_ref) self.assertEqual(expected_user_ids, user_ids) def _build_hints(self, hints, filters, fed_dict): for key in filters: - hints.add_filter(key, - fed_dict[key], - comparator='equals') + hints.add_filter(key, fed_dict[key], comparator='equals') return hints def _build_fed_resource(self): @@ -439,18 +489,20 @@ class IdentityTests(object): # test. new_mapping = unit.new_mapping_ref() PROVIDERS.federation_api.create_mapping(new_mapping['id'], new_mapping) - for idp_id, protocol_id in [('ORG_IDP', 'saml2'), - ('myidp', 'mapped')]: - new_idp = unit.new_identity_provider_ref(idp_id=idp_id, - domain_id='default') - new_protocol = unit.new_protocol_ref(protocol_id=protocol_id, - idp_id=idp_id, - mapping_id=new_mapping['id']) + for idp_id, protocol_id in [('ORG_IDP', 'saml2'), ('myidp', 'mapped')]: + new_idp = unit.new_identity_provider_ref( + idp_id=idp_id, domain_id='default' + ) + new_protocol = unit.new_protocol_ref( + protocol_id=protocol_id, + idp_id=idp_id, + mapping_id=new_mapping['id'], + ) PROVIDERS.federation_api.create_idp(new_idp['id'], new_idp) - PROVIDERS.federation_api.create_protocol(new_idp['id'], - new_protocol['id'], - new_protocol) + PROVIDERS.federation_api.create_protocol( + new_idp['id'], new_protocol['id'], new_protocol + ) def _test_list_users_with_attribute(self, filters, fed_dict): self._build_fed_resource() @@ -539,9 +591,11 @@ class IdentityTests(object): def test_list_users_with_name(self): self._build_fed_resource() federated_dict_1 = unit.new_federated_user_ref( - 
display_name='test1@federation.org') + display_name='test1@federation.org' + ) federated_dict_2 = unit.new_federated_user_ref( - display_name='test2@federation.org') + display_name='test2@federation.org' + ) domain = self._get_domain_fixture() hints = driver_hints.Hints() @@ -549,10 +603,12 @@ class IdentityTests(object): users = self.identity_api.list_users(hints=hints) self.assertEqual(0, len(users)) - self.shadow_users_api.create_federated_user(domain['id'], - federated_dict_1) - self.shadow_users_api.create_federated_user(domain['id'], - federated_dict_2) + self.shadow_users_api.create_federated_user( + domain['id'], federated_dict_1 + ) + self.shadow_users_api.create_federated_user( + domain['id'], federated_dict_2 + ) hints = driver_hints.Hints() hints.add_filter('name', 'test1@federation.org') users = self.identity_api.list_users(hints=hints) @@ -571,7 +627,9 @@ class IdentityTests(object): group2 = PROVIDERS.identity_api.create_group(group2) groups = PROVIDERS.identity_api.list_groups( domain_scope=self._set_domain_scope( - CONF.identity.default_domain_id)) + CONF.identity.default_domain_id + ) + ) self.assertEqual(2, len(groups)) group_ids = [] for group in groups: @@ -648,7 +706,7 @@ class IdentityTests(object): found = False for x in groups: - if (x['id'] == new_group['id']): + if x['id'] == new_group['id']: found = True self.assertTrue(found) @@ -656,22 +714,28 @@ class IdentityTests(object): domain = self._get_domain_fixture() new_user = unit.new_user_ref(domain_id=domain['id']) new_user = PROVIDERS.identity_api.create_user(new_user) - self.assertRaises(exception.GroupNotFound, - PROVIDERS.identity_api.add_user_to_group, - new_user['id'], - uuid.uuid4().hex) + self.assertRaises( + exception.GroupNotFound, + PROVIDERS.identity_api.add_user_to_group, + new_user['id'], + uuid.uuid4().hex, + ) new_group = unit.new_group_ref(domain_id=domain['id']) new_group = PROVIDERS.identity_api.create_group(new_group) - self.assertRaises(exception.UserNotFound, - 
PROVIDERS.identity_api.add_user_to_group, - uuid.uuid4().hex, - new_group['id']) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.add_user_to_group, + uuid.uuid4().hex, + new_group['id'], + ) - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.add_user_to_group, - uuid.uuid4().hex, - uuid.uuid4().hex) + self.assertRaises( + exception.NotFound, + PROVIDERS.identity_api.add_user_to_group, + uuid.uuid4().hex, + uuid.uuid4().hex, + ) def test_check_user_in_group(self): domain = self._get_domain_fixture() @@ -688,39 +752,49 @@ class IdentityTests(object): def test_check_user_not_in_group(self): new_group = unit.new_group_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) new_group = PROVIDERS.identity_api.create_group(new_group) new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) new_user = PROVIDERS.identity_api.create_user(new_user) - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.check_user_in_group, - new_user['id'], - new_group['id']) + self.assertRaises( + exception.NotFound, + PROVIDERS.identity_api.check_user_in_group, + new_user['id'], + new_group['id'], + ) def test_check_user_in_group_returns_not_found(self): new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) new_user = PROVIDERS.identity_api.create_user(new_user) new_group = unit.new_group_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) new_group = PROVIDERS.identity_api.create_group(new_group) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.check_user_in_group, - uuid.uuid4().hex, - new_group['id']) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.check_user_in_group, + uuid.uuid4().hex, + new_group['id'], + ) - self.assertRaises(exception.GroupNotFound, - PROVIDERS.identity_api.check_user_in_group, - new_user['id'], - uuid.uuid4().hex) + self.assertRaises( + 
exception.GroupNotFound, + PROVIDERS.identity_api.check_user_in_group, + new_user['id'], + uuid.uuid4().hex, + ) - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.check_user_in_group, - uuid.uuid4().hex, - uuid.uuid4().hex) + self.assertRaises( + exception.NotFound, + PROVIDERS.identity_api.check_user_in_group, + uuid.uuid4().hex, + uuid.uuid4().hex, + ) def test_list_users_in_group(self): domain = self._get_domain_fixture() @@ -739,15 +813,17 @@ class IdentityTests(object): user_refs = PROVIDERS.identity_api.list_users_in_group(new_group['id']) found = False for x in user_refs: - if (x['id'] == new_user['id']): + if x['id'] == new_user['id']: found = True self.assertNotIn('password', x) self.assertTrue(found) def test_list_users_in_group_returns_not_found(self): - self.assertRaises(exception.GroupNotFound, - PROVIDERS.identity_api.list_users_in_group, - uuid.uuid4().hex) + self.assertRaises( + exception.GroupNotFound, + PROVIDERS.identity_api.list_users_in_group, + uuid.uuid4().hex, + ) def test_list_groups_for_user(self): domain = self._get_domain_fixture() @@ -765,7 +841,8 @@ class IdentityTests(object): for x in range(0, USER_COUNT): group_refs = PROVIDERS.identity_api.list_groups_for_user( - test_users[x]['id']) + test_users[x]['id'] + ) self.assertEqual(0, len(group_refs)) for x in range(0, GROUP_COUNT): @@ -778,18 +855,21 @@ class IdentityTests(object): # add the user to the group and ensure that the # group count increases by one for each group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(before_count, len(group_refs)) PROVIDERS.identity_api.add_user_to_group( - positive_user['id'], - new_group['id']) + positive_user['id'], new_group['id'] + ) group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(after_count, len(group_refs)) # Make sure the group count for the unrelated user did not change group_refs 
= PROVIDERS.identity_api.list_groups_for_user( - negative_user['id']) + negative_user['id'] + ) self.assertEqual(0, len(group_refs)) def test_remove_user_from_group(self): @@ -815,20 +895,26 @@ class IdentityTests(object): new_user = PROVIDERS.identity_api.create_user(new_user) new_group = unit.new_group_ref(domain_id=domain['id']) new_group = PROVIDERS.identity_api.create_group(new_group) - self.assertRaises(exception.GroupNotFound, - PROVIDERS.identity_api.remove_user_from_group, - new_user['id'], - uuid.uuid4().hex) + self.assertRaises( + exception.GroupNotFound, + PROVIDERS.identity_api.remove_user_from_group, + new_user['id'], + uuid.uuid4().hex, + ) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.remove_user_from_group, - uuid.uuid4().hex, - new_group['id']) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.remove_user_from_group, + uuid.uuid4().hex, + new_group['id'], + ) - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.remove_user_from_group, - uuid.uuid4().hex, - uuid.uuid4().hex) + self.assertRaises( + exception.NotFound, + PROVIDERS.identity_api.remove_user_from_group, + uuid.uuid4().hex, + uuid.uuid4().hex, + ) def test_group_crud(self): domain = unit.new_domain_ref() @@ -844,20 +930,22 @@ class IdentityTests(object): self.assertLessEqual(group.items(), group_ref.items()) PROVIDERS.identity_api.delete_group(group['id']) - self.assertRaises(exception.GroupNotFound, - PROVIDERS.identity_api.get_group, - group['id']) + self.assertRaises( + exception.GroupNotFound, + PROVIDERS.identity_api.get_group, + group['id'], + ) def test_create_group_name_with_trailing_whitespace(self): group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group_name = group['name'] = (group['name'] + ' ') + group_name = group['name'] = group['name'] + ' ' group_returned = PROVIDERS.identity_api.create_group(group) self.assertEqual(group_returned['name'], group_name.strip()) def 
test_update_group_name_with_trailing_whitespace(self): group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) group_create = PROVIDERS.identity_api.create_group(group) - group_name = group['name'] = (group['name'] + ' ') + group_name = group['name'] = group['name'] + ' ' group_update = PROVIDERS.identity_api.update_group( group_create['id'], group ) @@ -872,14 +960,17 @@ class IdentityTests(object): PROVIDERS.identity_api.create_group(spoiler) group_ref = PROVIDERS.identity_api.get_group_by_name( - group_name, CONF.identity.default_domain_id) + group_name, CONF.identity.default_domain_id + ) self.assertDictEqual(group, group_ref) def test_get_group_by_name_returns_not_found(self): - self.assertRaises(exception.GroupNotFound, - PROVIDERS.identity_api.get_group_by_name, - uuid.uuid4().hex, - CONF.identity.default_domain_id) + self.assertRaises( + exception.GroupNotFound, + PROVIDERS.identity_api.get_group_by_name, + uuid.uuid4().hex, + CONF.identity.default_domain_id, + ) @unit.skip_if_cache_disabled('identity') def test_cache_layer_group_crud(self): @@ -901,8 +992,11 @@ class IdentityTests(object): PROVIDERS.identity_api.get_group.invalidate( PROVIDERS.identity_api, group['id'] ) - self.assertRaises(exception.GroupNotFound, - PROVIDERS.identity_api.get_group, group['id']) + self.assertRaises( + exception.GroupNotFound, + PROVIDERS.identity_api.get_group, + group['id'], + ) group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) group = PROVIDERS.identity_api.create_group(group) @@ -913,24 +1007,26 @@ class IdentityTests(object): # after updating through identity api, get updated group self.assertLessEqual( PROVIDERS.identity_api.get_group(group['id']).items(), - group_ref.items() + group_ref.items(), ) def test_create_duplicate_group_name_fails(self): group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group2 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id, - name=group1['name']) + group2 = 
unit.new_group_ref( + domain_id=CONF.identity.default_domain_id, name=group1['name'] + ) group1 = PROVIDERS.identity_api.create_group(group1) - self.assertRaises(exception.Conflict, - PROVIDERS.identity_api.create_group, - group2) + self.assertRaises( + exception.Conflict, PROVIDERS.identity_api.create_group, group2 + ) def test_create_duplicate_group_name_in_different_domains(self): new_domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain) group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group2 = unit.new_group_ref(domain_id=new_domain['id'], - name=group1['name']) + group2 = unit.new_group_ref( + domain_id=new_domain['id'], name=group1['name'] + ) group1 = PROVIDERS.identity_api.create_group(group1) group2 = PROVIDERS.identity_api.create_group(group2) @@ -942,13 +1038,17 @@ class IdentityTests(object): group = unit.new_group_ref(domain_id=domain1['id']) group = PROVIDERS.identity_api.create_group(group) group['domain_id'] = domain2['id'] - self.assertRaises(exception.ValidationError, - PROVIDERS.identity_api.update_group, - group['id'], group) + self.assertRaises( + exception.ValidationError, + PROVIDERS.identity_api.update_group, + group['id'], + group, + ) def test_user_crud(self): user_dict = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) del user_dict['id'] user = PROVIDERS.identity_api.create_user(user_dict) user_ref = PROVIDERS.identity_api.get_user(user['id']) @@ -964,15 +1064,16 @@ class IdentityTests(object): self.assertLessEqual(user_dict.items(), user_ref_dict.items()) PROVIDERS.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, - user['id']) + self.assertRaises( + exception.UserNotFound, PROVIDERS.identity_api.get_user, user['id'] + ) def test_arbitrary_attributes_are_returned_from_create_user(self): attr_value = uuid.uuid4().hex user_data = 
unit.new_user_ref( domain_id=CONF.identity.default_domain_id, - arbitrary_attr=attr_value) + arbitrary_attr=attr_value, + ) user = PROVIDERS.identity_api.create_user(user_data) @@ -982,7 +1083,8 @@ class IdentityTests(object): attr_value = uuid.uuid4().hex user_data = unit.new_user_ref( domain_id=CONF.identity.default_domain_id, - arbitrary_attr=attr_value) + arbitrary_attr=attr_value, + ) user_data = PROVIDERS.identity_api.create_user(user_data) @@ -991,7 +1093,8 @@ class IdentityTests(object): def test_new_arbitrary_attributes_are_returned_from_update_user(self): user_data = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) user = PROVIDERS.identity_api.create_user(user_data) attr_value = uuid.uuid4().hex @@ -1004,7 +1107,8 @@ class IdentityTests(object): attr_value = uuid.uuid4().hex user_data = unit.new_user_ref( domain_id=CONF.identity.default_domain_id, - arbitrary_attr=attr_value) + arbitrary_attr=attr_value, + ) new_attr_value = uuid.uuid4().hex user = PROVIDERS.identity_api.create_user(user_data) @@ -1020,7 +1124,8 @@ class IdentityTests(object): updated_user = {'enabled': False} updated_user_ref = PROVIDERS.identity_api.update_user( - user['id'], updated_user) + user['id'], updated_user + ) # SQL backend adds 'extra' field updated_user_ref.pop('extra', None) @@ -1037,8 +1142,9 @@ class IdentityTests(object): for _ in range(domain_count): domain_name = '%s-%s' % (domain_name_prefix, uuid.uuid4().hex) domain = unit.new_domain_ref(name=domain_name) - self.domain_list[domain_name] = \ + self.domain_list[domain_name] = ( PROVIDERS.resource_api.create_domain(domain['id'], domain) + ) def clean_up_domains(): for _, domain in self.domain_list.items(): @@ -1072,10 +1178,12 @@ class IdentityTests(object): hints.add_filter('name', 'domaingroup1', comparator='startswith') entities = PROVIDERS.resource_api.list_domains(hints=hints) self.assertThat(entities, matchers.HasLength(2)) - 
self.assertThat(entities[0]['name'], - matchers.StartsWith('domaingroup1')) - self.assertThat(entities[1]['name'], - matchers.StartsWith('domaingroup1')) + self.assertThat( + entities[0]['name'], matchers.StartsWith('domaingroup1') + ) + self.assertThat( + entities[1]['name'], matchers.StartsWith('domaingroup1') + ) @unit.skip_if_no_multiple_domains_support def test_list_limit_for_domains(self): @@ -1083,12 +1191,14 @@ class IdentityTests(object): for _ in range(count): domain = unit.new_domain_ref() self.domain_list.append( - PROVIDERS.resource_api.create_domain(domain['id'], domain)) + PROVIDERS.resource_api.create_domain(domain['id'], domain) + ) def clean_up_domains(): for domain in self.domain_list: PROVIDERS.resource_api.update_domain( - domain['id'], {'enabled': False}) + domain['id'], {'enabled': False} + ) PROVIDERS.resource_api.delete_domain(domain['id']) self.domain_list = [] @@ -1130,26 +1240,27 @@ class FilterTests(filtering.FilterTests): 8: 'The Ministry of Silly', 9: 'The Ministry of Silly Walks', # ...and one for useful case insensitivity testing - 10: 'The ministry of silly walks OF' + 10: 'The ministry of silly walks OF', } user_list = self._create_test_data( - 'user', 20, domain_id=CONF.identity.default_domain_id, - name_dict=user_name_data) + 'user', + 20, + domain_id=CONF.identity.default_domain_id, + name_dict=user_name_data, + ) hints = driver_hints.Hints() hints.add_filter('name', 'ministry', comparator='contains') users = PROVIDERS.identity_api.list_users(hints=hints) self.assertEqual(5, len(users)) - self._match_with_list(users, user_list, - list_start=6, list_end=11) + self._match_with_list(users, user_list, list_start=6, list_end=11) # TODO(henry-nash) Check inexact filter has been removed. 
hints = driver_hints.Hints() hints.add_filter('name', 'The', comparator='startswith') users = PROVIDERS.identity_api.list_users(hints=hints) self.assertEqual(6, len(users)) - self._match_with_list(users, user_list, - list_start=5, list_end=11) + self._match_with_list(users, user_list, list_start=5, list_end=11) # TODO(henry-nash) Check inexact filter has been removed. hints = driver_hints.Hints() @@ -1185,9 +1296,11 @@ class FilterTests(filtering.FilterTests): 9: 'The Ministry of Silly Walks', } group_list = self._create_test_data( - 'group', number_of_groups, + 'group', + number_of_groups, domain_id=CONF.identity.default_domain_id, - name_dict=group_name_data) + name_dict=group_name_data, + ) user_list = self._create_test_data('user', 2) for group in range(7): @@ -1226,7 +1339,8 @@ class FilterTests(filtering.FilterTests): hints = driver_hints.Hints() hints.add_filter('name', 'Ministry', comparator='contains') groups = PROVIDERS.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) + user_list[0]['id'], hints=hints + ) # We should only get back one group, since of the two that contain # 'Ministry' the user only belongs to one. 
self.assertThat(len(groups), matchers.Equals(1)) @@ -1235,7 +1349,8 @@ class FilterTests(filtering.FilterTests): hints = driver_hints.Hints() hints.add_filter('name', 'The', comparator='startswith') groups = PROVIDERS.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) + user_list[0]['id'], hints=hints + ) # We should only get back 2 out of the 3 groups that start with 'The' # hence showing that both "filters" have been applied self.assertThat(len(groups), matchers.Equals(2)) @@ -1244,7 +1359,8 @@ class FilterTests(filtering.FilterTests): hints.add_filter('name', 'The', comparator='endswith') groups = PROVIDERS.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) + user_list[0]['id'], hints=hints + ) # We should only get back one group since it is the only one that # ends with 'The' self.assertThat(len(groups), matchers.Equals(1)) @@ -1259,7 +1375,8 @@ class FilterTests(filtering.FilterTests): hints = driver_hints.Hints() hints.add_filter('name', 'The Ministry', comparator='equals') groups = PROVIDERS.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) + user_list[0]['id'], hints=hints + ) # We should only get back 1 out of the 3 groups with name 'The # Ministry' hence showing that both "filters" have been applied. 
self.assertEqual(1, len(groups)) @@ -1309,11 +1426,14 @@ class FilterTests(filtering.FilterTests): 9: 'Arthur Schopenhauer', } user_list = self._create_test_data( - 'user', number_of_users, + 'user', + number_of_users, domain_id=CONF.identity.default_domain_id, - name_dict=user_name_data) + name_dict=user_name_data, + ) group = self._create_one_entity( - 'group', CONF.identity.default_domain_id, 'Great Writers') + 'group', CONF.identity.default_domain_id, 'Great Writers' + ) for i in range(7): PROVIDERS.identity_api.add_user_to_group( user_list[i]['id'], group['id'] diff --git a/keystone/tests/unit/identity/test_core.py b/keystone/tests/unit/identity/test_core.py index ee077daad6..918942b1de 100644 --- a/keystone/tests/unit/identity/test_core.py +++ b/keystone/tests/unit/identity/test_core.py @@ -44,8 +44,9 @@ class TestDomainConfigs(unit.BaseTestCase): self.tmp_dir = unit.dirs.tmp() self.config_fixture = self.useFixture(config_fixture.Config(CONF)) - self.config_fixture.config(domain_config_dir=self.tmp_dir, - group='identity') + self.config_fixture.config( + domain_config_dir=self.tmp_dir, group='identity' + ) def test_config_for_nonexistent_domain(self): """Having a config for a non-existent domain will be ignored. 
@@ -56,8 +57,9 @@ class TestDomainConfigs(unit.BaseTestCase): """ domain_id = uuid.uuid4().hex - domain_config_filename = os.path.join(self.tmp_dir, - 'keystone.%s.conf' % domain_id) + domain_config_filename = os.path.join( + self.tmp_dir, 'keystone.%s.conf' % domain_id + ) self.addCleanup(lambda: os.remove(domain_config_filename)) with open(domain_config_filename, 'w'): """Write an empty config file.""" @@ -68,28 +70,32 @@ class TestDomainConfigs(unit.BaseTestCase): domain_config = identity.DomainConfigs() fake_standard_driver = None - domain_config.setup_domain_drivers(fake_standard_driver, - mock_assignment_api) + domain_config.setup_domain_drivers( + fake_standard_driver, mock_assignment_api + ) def test_config_for_dot_name_domain(self): # Ensure we can get the right domain name which has dots within it # from filename. - domain_config_filename = os.path.join(self.tmp_dir, - 'keystone.abc.def.com.conf') + domain_config_filename = os.path.join( + self.tmp_dir, 'keystone.abc.def.com.conf' + ) with open(domain_config_filename, 'w'): """Write an empty config file.""" self.addCleanup(os.remove, domain_config_filename) - with mock.patch.object(identity.DomainConfigs, - '_load_config_from_file') as mock_load_config: + with mock.patch.object( + identity.DomainConfigs, '_load_config_from_file' + ) as mock_load_config: domain_config = identity.DomainConfigs() fake_assignment_api = None fake_standard_driver = None - domain_config.setup_domain_drivers(fake_standard_driver, - fake_assignment_api) - mock_load_config.assert_called_once_with(fake_assignment_api, - [domain_config_filename], - 'abc.def.com') + domain_config.setup_domain_drivers( + fake_standard_driver, fake_assignment_api + ) + mock_load_config.assert_called_once_with( + fake_assignment_api, [domain_config_filename], 'abc.def.com' + ) def test_config_for_multiple_sql_backend(self): domains_config = identity.DomainConfigs() @@ -103,30 +109,40 @@ class TestDomainConfigs(unit.BaseTestCase): drv = 
mock.Mock(is_sql=is_sql) drivers.append(drv) name = 'dummy.{0}'.format(idx) - files.append(''.join(( - identity.DOMAIN_CONF_FHEAD, - name, - identity.DOMAIN_CONF_FTAIL))) + files.append( + ''.join( + ( + identity.DOMAIN_CONF_FHEAD, + name, + identity.DOMAIN_CONF_FTAIL, + ) + ) + ) def walk_fake(*a, **kwa): - return ('/fake/keystone/domains/config', [], files), + return (('/fake/keystone/domains/config', [], files),) generic_driver = mock.Mock(is_sql=False) assignment_api = mock.Mock() id_factory = itertools.count() - assignment_api.get_domain_by_name.side_effect = ( - lambda name: {'id': next(id_factory), '_': 'fake_domain'}) + assignment_api.get_domain_by_name.side_effect = lambda name: { + 'id': next(id_factory), + '_': 'fake_domain', + } load_driver_mock = mock.Mock(side_effect=drivers) with mock.patch.object(os, 'walk', walk_fake): with mock.patch.object(identity.cfg, 'ConfigOpts'): - with mock.patch.object(domains_config, '_load_driver', - load_driver_mock): + with mock.patch.object( + domains_config, '_load_driver', load_driver_mock + ): self.assertRaises( exception.MultipleSQLDriversInConfig, domains_config.setup_domain_drivers, - generic_driver, assignment_api) + generic_driver, + assignment_api, + ) self.assertEqual(3, load_driver_mock.call_count) @@ -138,27 +154,33 @@ class TestDatabaseDomainConfigs(unit.TestCase): self.useFixture(database.Database()) self.load_backends() PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) def test_domain_config_in_database_disabled_by_default(self): self.assertFalse(CONF.identity.domain_configurations_from_database) def test_loading_config_from_database(self): - self.config_fixture.config(domain_configurations_from_database=True, - group='identity') + self.config_fixture.config( + domain_configurations_from_database=True, group='identity' + ) domain = unit.new_domain_ref() 
PROVIDERS.resource_api.create_domain(domain['id'], domain) # Override two config options for our domain - conf = {'ldap': {'url': uuid.uuid4().hex, - 'suffix': uuid.uuid4().hex, - 'use_tls': True}, - 'identity': { - 'driver': 'ldap'}} + conf = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'suffix': uuid.uuid4().hex, + 'use_tls': True, + }, + 'identity': {'driver': 'ldap'}, + } PROVIDERS.domain_config_api.create_config(domain['id'], conf) fake_standard_driver = None domain_config = identity.DomainConfigs() - domain_config.setup_domain_drivers(fake_standard_driver, - PROVIDERS.resource_api) + domain_config.setup_domain_drivers( + fake_standard_driver, PROVIDERS.resource_api + ) # Make sure our two overrides are in place, and others are not affected res = domain_config.get_domain_conf(domain['id']) self.assertEqual(conf['ldap']['url'], res.ldap.url) @@ -167,18 +189,20 @@ class TestDatabaseDomainConfigs(unit.TestCase): # Make sure the override is not changing the type of the config value use_tls_type = type(CONF.ldap.use_tls) - self.assertEqual(use_tls_type(conf['ldap']['use_tls']), - res.ldap.use_tls) + self.assertEqual( + use_tls_type(conf['ldap']['use_tls']), res.ldap.use_tls + ) # Now turn off using database domain configuration and check that the # default config file values are now seen instead of the overrides. 
self.config_fixture.config( - group='identity', - domain_configurations_from_database=False) + group='identity', domain_configurations_from_database=False + ) domain_config = identity.DomainConfigs() - domain_config.setup_domain_drivers(fake_standard_driver, - PROVIDERS.resource_api) + domain_config.setup_domain_drivers( + fake_standard_driver, PROVIDERS.resource_api + ) res = domain_config.get_domain_conf(domain['id']) self.assertEqual(CONF.ldap.url, res.ldap.url) self.assertEqual(CONF.ldap.suffix, res.ldap.suffix) @@ -191,8 +215,10 @@ class TestDatabaseDomainConfigs(unit.TestCase): # Prepare fake driver extension = stevedore.extension.Extension( - name="foo", entry_point=None, - obj=fake_driver.FooDriver(), plugin=None + name="foo", + entry_point=None, + obj=fake_driver.FooDriver(), + plugin=None, ) fake_driver_manager = stevedore.DriverManager.make_test_instance( extension, namespace="keystone.identity" @@ -209,14 +235,15 @@ class TestDatabaseDomainConfigs(unit.TestCase): ) self.config_fixture.config( additional_whitelisted_options={"foo": ["opt1"]}, - group="domain_config" + group="domain_config", ) domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain["id"], domain) # Override two config options for our domain conf = { "foo": {"opt1": uuid.uuid4().hex}, - "identity": {"driver": "foo"}} + "identity": {"driver": "foo"}, + } PROVIDERS.domain_config_api.create_config(domain["id"], conf) domain_config = identity.DomainConfigs() domain_config.setup_domain_drivers("foo", PROVIDERS.resource_api) diff --git a/keystone/tests/unit/ksfixtures/__init__.py b/keystone/tests/unit/ksfixtures/__init__.py index a5fedbfc83..d1e441f35e 100644 --- a/keystone/tests/unit/ksfixtures/__init__.py +++ b/keystone/tests/unit/ksfixtures/__init__.py @@ -11,10 +11,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from keystone.tests.unit.ksfixtures.auth_plugins import ConfigAuthPlugins # noqa +from keystone.tests.unit.ksfixtures.auth_plugins import ( + ConfigAuthPlugins, +) # noqa from keystone.tests.unit.ksfixtures.backendloader import BackendLoader # noqa from keystone.tests.unit.ksfixtures.cache import Cache # noqa -from keystone.tests.unit.ksfixtures.jws_key_repository import JWSKeyRepository # noqa +from keystone.tests.unit.ksfixtures.jws_key_repository import ( + JWSKeyRepository, +) # noqa from keystone.tests.unit.ksfixtures.key_repository import KeyRepository # noqa from keystone.tests.unit.ksfixtures.logging import StandardLogging # noqa from keystone.tests.unit.ksfixtures.policy import Policy # noqa diff --git a/keystone/tests/unit/ksfixtures/auth_plugins.py b/keystone/tests/unit/ksfixtures/auth_plugins.py index 462046d976..1e1c7ac1cf 100644 --- a/keystone/tests/unit/ksfixtures/auth_plugins.py +++ b/keystone/tests/unit/ksfixtures/auth_plugins.py @@ -52,8 +52,7 @@ class LoadAuthPlugins(fixtures.Fixture): for method_name in self.method_names: if method_name in AUTH_METHODS: self.saved[method_name] = AUTH_METHODS[method_name] - AUTH_METHODS[method_name] = auth.core.load_auth_method( - method_name) + AUTH_METHODS[method_name] = auth.core.load_auth_method(method_name) auth.core.AUTH_PLUGINS_LOADED = True def cleanUp(self): diff --git a/keystone/tests/unit/ksfixtures/cache.py b/keystone/tests/unit/ksfixtures/cache.py index 86293a5f31..9707c7a4e8 100644 --- a/keystone/tests/unit/ksfixtures/cache.py +++ b/keystone/tests/unit/ksfixtures/cache.py @@ -18,8 +18,11 @@ from keystone.common import cache from keystone import revoke -CACHE_REGIONS = (cache.CACHE_REGION, catalog.COMPUTED_CATALOG_REGION, - revoke.REVOKE_REGION) +CACHE_REGIONS = ( + cache.CACHE_REGION, + catalog.COMPUTED_CATALOG_REGION, + revoke.REVOKE_REGION, +) class Cache(fixtures.Fixture): diff --git a/keystone/tests/unit/ksfixtures/database.py b/keystone/tests/unit/ksfixtures/database.py index 
a9a2bcf359..126d6f11a8 100644 --- a/keystone/tests/unit/ksfixtures/database.py +++ b/keystone/tests/unit/ksfixtures/database.py @@ -32,25 +32,26 @@ def run_once(f): The decorated function is assumed to have a one parameter. """ + @functools.wraps(f) def wrapper(): if not wrapper.already_ran: f() wrapper.already_ran = True + wrapper.already_ran = False return wrapper # NOTE(I159): Every execution all the options will be cleared. The method must # be called at the every fixture initialization. -def initialize_sql_session(connection_str=unit.IN_MEM_DB_CONN_STRING, - enforce_sqlite_fks=True): +def initialize_sql_session( + connection_str=unit.IN_MEM_DB_CONN_STRING, enforce_sqlite_fks=True +): # Make sure the DB is located in the correct location, in this case set # the default value, as this should be able to be overridden in some # test cases. - db_options.set_defaults( - CONF, - connection=connection_str) + db_options.set_defaults(CONF, connection=connection_str) # Enable the Sqlite FKs for global engine by default. facade = enginefacade.writer @@ -77,19 +78,19 @@ def _load_sqlalchemy_models(): the test run avoids this problem. 
""" - keystone_root = os.path.normpath(os.path.join( - os.path.dirname(__file__), '..', '..', '..')) + keystone_root = os.path.normpath( + os.path.join(os.path.dirname(__file__), '..', '..', '..') + ) for root, dirs, files in os.walk(keystone_root): # NOTE(morganfainberg): Slice the keystone_root off the root to ensure # we do not end up with a module name like: # Users.home.openstack.keystone.assignment.backends.sql - root = root[len(keystone_root):] + root = root[len(keystone_root) :] if root.endswith('backends') and 'sql.py' in files: # The root will be prefixed with an instance of os.sep, which will # make the root after replacement '.', the 'keystone' part # of the module path is always added to the front - module_root = ('keystone.%s' % - root.replace(os.sep, '.').lstrip('.')) + module_root = 'keystone.%s' % root.replace(os.sep, '.').lstrip('.') module_components = module_root.split('.') module_without_backends = '' for x in range(0, len(module_components) - 1): diff --git a/keystone/tests/unit/ksfixtures/hacking.py b/keystone/tests/unit/ksfixtures/hacking.py index c7c2622f1c..c04d898d76 100644 --- a/keystone/tests/unit/ksfixtures/hacking.py +++ b/keystone/tests/unit/ksfixtures/hacking.py @@ -78,7 +78,8 @@ class HackingCode(fixtures.Fixture): (28, 26, 'K001'), (29, 21, 'K001'), (32, 10, 'K001'), - ]} + ], + } # NOTE(browne): This is gross, but in Python 3.4 and earlier, the ast # module returns the incorrect col_offset for two of the defined functions @@ -105,7 +106,8 @@ class HackingCode(fixtures.Fixture): """, 'expected_errors': [ (3, 0, 'K002'), - ]} + ], + } asserting_none_equality = { 'code': """ @@ -125,7 +127,8 @@ class HackingCode(fixtures.Fixture): (6, 8, 'K003'), (7, 8, 'K004'), (8, 8, 'K004'), - ]} + ], + } dict_constructor = { 'code': """ @@ -140,7 +143,8 @@ class HackingCode(fixtures.Fixture): (3, 0, 'K008'), (4, 0, 'K008'), (5, 0, 'K008'), - ]} + ], + } class HackingTranslations(fixtures.Fixture): diff --git 
a/keystone/tests/unit/ksfixtures/jws_key_repository.py b/keystone/tests/unit/ksfixtures/jws_key_repository.py index c6cca461bd..2ba354706a 100644 --- a/keystone/tests/unit/ksfixtures/jws_key_repository.py +++ b/keystone/tests/unit/ksfixtures/jws_key_repository.py @@ -34,11 +34,11 @@ class JWSKeyRepository(fixtures.Fixture): # set config to use temporary paths self.config_fixture.config( group=self.key_group, - jws_private_key_repository=private_key_directory + jws_private_key_repository=private_key_directory, ) self.config_fixture.config( group=self.key_group, - jws_public_key_repository=public_key_directory + jws_public_key_repository=public_key_directory, ) # create temporary repositories diff --git a/keystone/tests/unit/ksfixtures/key_repository.py b/keystone/tests/unit/ksfixtures/key_repository.py index 45be418475..c43fc11095 100644 --- a/keystone/tests/unit/ksfixtures/key_repository.py +++ b/keystone/tests/unit/ksfixtures/key_repository.py @@ -25,13 +25,12 @@ class KeyRepository(fixtures.Fixture): def setUp(self): super(KeyRepository, self).setUp() directory = self.useFixture(fixtures.TempDir()).path - self.config_fixture.config(group=self.key_group, - key_repository=directory) + self.config_fixture.config( + group=self.key_group, key_repository=directory + ) fernet_utils = utils.FernetUtils( - directory, - self.max_active_keys, - self.key_group + directory, self.max_active_keys, self.key_group ) fernet_utils.create_key_directory() fernet_utils.initialize_key_repository() diff --git a/keystone/tests/unit/ksfixtures/logging.py b/keystone/tests/unit/ksfixtures/logging.py index 419880deb4..d959dbdff5 100644 --- a/keystone/tests/unit/ksfixtures/logging.py +++ b/keystone/tests/unit/ksfixtures/logging.py @@ -75,7 +75,8 @@ class StandardLogging(fixtures.Fixture): # Collect logs fs = '%(asctime)s %(levelname)s [%(name)s] %(message)s' self.logger = self.useFixture( - fixtures.FakeLogger(format=fs, level=None)) + fixtures.FakeLogger(format=fs, level=None) + ) # 
TODO(sdague): why can't we send level through the fake # logger? Tests prove that it breaks, but it's worth getting # to the bottom of. @@ -88,13 +89,15 @@ class StandardLogging(fixtures.Fixture): handler.setLevel(std_logging.DEBUG) # Don't log every single DB migration step - std_logging.getLogger( - 'migrate.versioning.api').setLevel(std_logging.WARNING) + std_logging.getLogger('migrate.versioning.api').setLevel( + std_logging.WARNING + ) # Or alembic for model comparisons. std_logging.getLogger('alembic').setLevel(std_logging.WARNING) # Or oslo_db provisioning steps std_logging.getLogger('oslo_db.sqlalchemy').setLevel( - std_logging.WARNING) + std_logging.WARNING + ) # At times we end up calling back into main() functions in # testing. This has the possibility of calling logging.setup @@ -105,7 +108,8 @@ class StandardLogging(fixtures.Fixture): pass self.useFixture( - fixtures.MonkeyPatch('oslo_log.log.setup', fake_logging_setup)) + fixtures.MonkeyPatch('oslo_log.log.setup', fake_logging_setup) + ) def delete_stored_logs(self): # NOTE(gibi): this depends on the internals of the fixtures.FakeLogger. 
diff --git a/keystone/tests/unit/ksfixtures/policy.py b/keystone/tests/unit/ksfixtures/policy.py index 2adfe6a0a4..4a7c24c703 100644 --- a/keystone/tests/unit/ksfixtures/policy.py +++ b/keystone/tests/unit/ksfixtures/policy.py @@ -27,7 +27,8 @@ class Policy(fixtures.Fixture): def setUp(self): super(Policy, self).setUp() opts.set_defaults(self._config_fixture.conf) - self._config_fixture.config(group='oslo_policy', - policy_file=self._policy_file) + self._config_fixture.config( + group='oslo_policy', policy_file=self._policy_file + ) policy._ENFORCER.suppress_deprecation_warnings = True self.addCleanup(policy.reset) diff --git a/keystone/tests/unit/limit/test_backends.py b/keystone/tests/unit/limit/test_backends.py index 89307855e2..a046bf59b9 100644 --- a/keystone/tests/unit/limit/test_backends.py +++ b/keystone/tests/unit/limit/test_backends.py @@ -27,23 +27,34 @@ class RegisteredLimitTests(object): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex, - description='test description') + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + description='test description', + ) reg_limits = PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1]) + [registered_limit_1] + ) self.assertDictEqual(registered_limit_1, reg_limits[0]) # create another two, return them. 
registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='snapshot', default_limit=5, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=5, + id=uuid.uuid4().hex, + ) registered_limit_3 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='backup', default_limit=5, id=uuid.uuid4().hex) + resource_name='backup', + default_limit=5, + id=uuid.uuid4().hex, + ) reg_limits = PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_2, registered_limit_3]) + [registered_limit_2, registered_limit_3] + ) self.assertEqual(2, len(reg_limits)) for reg_limit in reg_limits: if reg_limit['id'] == registered_limit_2['id']: @@ -55,39 +66,60 @@ class RegisteredLimitTests(object): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1]) + [registered_limit_1] + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) - self.assertRaises(exception.Conflict, - PROVIDERS.unified_limit_api.create_registered_limits, - [registered_limit_2]) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) + self.assertRaises( + exception.Conflict, + PROVIDERS.unified_limit_api.create_registered_limits, + [registered_limit_2], + ) def test_create_multi_registered_limits_duplicate(self): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, 
+ id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1]) + [registered_limit_1] + ) # Create with a duplicated one and a normal one. Both of them will not # be created. registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_3 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='snapshot', default_limit=10, id=uuid.uuid4().hex) - self.assertRaises(exception.Conflict, - PROVIDERS.unified_limit_api.create_registered_limits, - [registered_limit_2, registered_limit_3]) + resource_name='snapshot', + default_limit=10, + id=uuid.uuid4().hex, + ) + self.assertRaises( + exception.Conflict, + PROVIDERS.unified_limit_api.create_registered_limits, + [registered_limit_2, registered_limit_3], + ) reg_limits = PROVIDERS.unified_limit_api.list_registered_limits() self.assertEqual(1, len(reg_limits)) @@ -97,38 +129,56 @@ class RegisteredLimitTests(object): registered_limit_1 = unit.new_registered_limit_ref( service_id=uuid.uuid4().hex, region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) - self.assertRaises(exception.ValidationError, - PROVIDERS.unified_limit_api.create_registered_limits, - [registered_limit_1]) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) + self.assertRaises( + exception.ValidationError, + PROVIDERS.unified_limit_api.create_registered_limits, + [registered_limit_1], + ) def test_create_registered_limit_invalid_region(self): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=uuid.uuid4().hex, - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) - self.assertRaises(exception.ValidationError, - 
PROVIDERS.unified_limit_api.create_registered_limits, - [registered_limit_1]) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) + self.assertRaises( + exception.ValidationError, + PROVIDERS.unified_limit_api.create_registered_limits, + [registered_limit_1], + ) def test_create_registered_limit_description_none(self): registered_limit = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex, - description=None) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + description=None, + ) res = PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit]) + [registered_limit] + ) self.assertIsNone(res[0]['description']) def test_create_registered_limit_without_description(self): registered_limit = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit.pop('description') res = PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit]) + [registered_limit] + ) self.assertIsNone(res[0]['description']) def test_update_registered_limit(self): @@ -136,118 +186,183 @@ class RegisteredLimitTests(object): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='snapshot', default_limit=5, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=5, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1, 
registered_limit_2]) + [registered_limit_1, registered_limit_2] + ) expect_region = 'region_two' - registered_limit_update = {'id': registered_limit_1['id'], - 'region_id': expect_region} + registered_limit_update = { + 'id': registered_limit_1['id'], + 'region_id': expect_region, + } res = PROVIDERS.unified_limit_api.update_registered_limit( - registered_limit_1['id'], registered_limit_update) + registered_limit_1['id'], registered_limit_update + ) self.assertEqual(expect_region, res['region_id']) # 'id' can be omitted in the update body registered_limit_update = {'region_id': expect_region} res = PROVIDERS.unified_limit_api.update_registered_limit( - registered_limit_2['id'], registered_limit_update) + registered_limit_2['id'], registered_limit_update + ) self.assertEqual(expect_region, res['region_id']) def test_update_registered_limit_invalid_input_return_bad_request(self): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1]) + [registered_limit_1] + ) - update_ref = {'id': registered_limit_1['id'], - 'service_id': uuid.uuid4().hex} - self.assertRaises(exception.ValidationError, - PROVIDERS.unified_limit_api.update_registered_limit, - registered_limit_1['id'], update_ref) + update_ref = { + 'id': registered_limit_1['id'], + 'service_id': uuid.uuid4().hex, + } + self.assertRaises( + exception.ValidationError, + PROVIDERS.unified_limit_api.update_registered_limit, + registered_limit_1['id'], + update_ref, + ) - update_ref = {'id': registered_limit_1['id'], - 'region_id': 'fake_id'} - self.assertRaises(exception.ValidationError, - PROVIDERS.unified_limit_api.update_registered_limit, - registered_limit_1['id'], update_ref) + update_ref = {'id': registered_limit_1['id'], 'region_id': 
'fake_id'} + self.assertRaises( + exception.ValidationError, + PROVIDERS.unified_limit_api.update_registered_limit, + registered_limit_1['id'], + update_ref, + ) def test_update_registered_limit_duplicate(self): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', default_limit=10, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1, registered_limit_2]) + [registered_limit_1, registered_limit_2] + ) # Update registered_limit1 to registered_limit2 - update_ref = {'id': registered_limit_1['id'], - 'region_id': self.region_two['id'], - 'resource_name': 'snapshot'} - self.assertRaises(exception.Conflict, - PROVIDERS.unified_limit_api.update_registered_limit, - registered_limit_1['id'], update_ref) + update_ref = { + 'id': registered_limit_1['id'], + 'region_id': self.region_two['id'], + 'resource_name': 'snapshot', + } + self.assertRaises( + exception.Conflict, + PROVIDERS.unified_limit_api.update_registered_limit, + registered_limit_1['id'], + update_ref, + ) def test_update_registered_limit_when_reference_limit_exist(self): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1]) + [registered_limit_1] + ) limit_1 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], 
region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_1]) - registered_limit_update = {'id': registered_limit_1['id'], - 'region_id': 'region_two'} + registered_limit_update = { + 'id': registered_limit_1['id'], + 'region_id': 'region_two', + } - self.assertRaises(exception.RegisteredLimitError, - PROVIDERS.unified_limit_api.update_registered_limit, - registered_limit_1['id'], registered_limit_update) + self.assertRaises( + exception.RegisteredLimitError, + PROVIDERS.unified_limit_api.update_registered_limit, + registered_limit_1['id'], + registered_limit_update, + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_2]) + [registered_limit_2] + ) limit_2 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_2]) - registered_limit_update = {'id': registered_limit_2['id'], - 'region_id': 'region_two'} + registered_limit_update = { + 'id': registered_limit_2['id'], + 'region_id': 'region_two', + } - self.assertRaises(exception.RegisteredLimitError, - PROVIDERS.unified_limit_api.update_registered_limit, - registered_limit_2['id'], registered_limit_update) + self.assertRaises( + exception.RegisteredLimitError, + PROVIDERS.unified_limit_api.update_registered_limit, + registered_limit_2['id'], + registered_limit_update, + ) def test_list_registered_limits(self): # create two registered limits registered_limit_1 = 
unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='snapshot', default_limit=5, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=5, + id=uuid.uuid4().hex, + ) reg_limits_1 = PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1, registered_limit_2]) + [registered_limit_1, registered_limit_2] + ) # list reg_limits_2 = PROVIDERS.unified_limit_api.list_registered_limits() @@ -261,18 +376,26 @@ class RegisteredLimitTests(object): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='snapshot', default_limit=5, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=5, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1, registered_limit_2]) + [registered_limit_1, registered_limit_2] + ) # list, limit is 1 hints = driver_hints.Hints() reg_limits = PROVIDERS.unified_limit_api.list_registered_limits( - hints=hints) + hints=hints + ) self.assertEqual(1, len(reg_limits)) if reg_limits[0]['id'] == registered_limit_1['id']: @@ -284,13 +407,20 @@ class RegisteredLimitTests(object): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, 
+ ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', default_limit=10, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1, registered_limit_2]) + [registered_limit_1, registered_limit_2] + ) hints = driver_hints.Hints() hints.add_filter('service_id', self.service_one['id']) @@ -312,100 +442,144 @@ class RegisteredLimitTests(object): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='snapshot', default_limit=5, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=5, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1, registered_limit_2]) + [registered_limit_1, registered_limit_2] + ) # show one res = PROVIDERS.unified_limit_api.get_registered_limit( - registered_limit_2['id']) + registered_limit_2['id'] + ) self.assertDictEqual(registered_limit_2, res) def test_get_registered_limit_returns_not_found(self): - self.assertRaises(exception.RegisteredLimitNotFound, - PROVIDERS.unified_limit_api.get_registered_limit, - uuid.uuid4().hex) + self.assertRaises( + exception.RegisteredLimitNotFound, + PROVIDERS.unified_limit_api.get_registered_limit, + uuid.uuid4().hex, + ) def test_delete_registered_limit(self): # create two registered limits registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + 
default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='snapshot', default_limit=5, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=5, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1, registered_limit_2]) + [registered_limit_1, registered_limit_2] + ) # delete one PROVIDERS.unified_limit_api.delete_registered_limit( - registered_limit_1['id']) - self.assertRaises(exception.RegisteredLimitNotFound, - PROVIDERS.unified_limit_api.get_registered_limit, - registered_limit_1['id']) + registered_limit_1['id'] + ) + self.assertRaises( + exception.RegisteredLimitNotFound, + PROVIDERS.unified_limit_api.get_registered_limit, + registered_limit_1['id'], + ) reg_limits = PROVIDERS.unified_limit_api.list_registered_limits() self.assertEqual(1, len(reg_limits)) self.assertEqual(registered_limit_2['id'], reg_limits[0]['id']) def test_delete_registered_limit_returns_not_found(self): - self.assertRaises(exception.RegisteredLimitNotFound, - PROVIDERS.unified_limit_api.delete_registered_limit, - uuid.uuid4().hex) + self.assertRaises( + exception.RegisteredLimitNotFound, + PROVIDERS.unified_limit_api.delete_registered_limit, + uuid.uuid4().hex, + ) def test_delete_registered_limit_when_reference_limit_exist(self): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1]) + [registered_limit_1] + ) limit_1 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', 
+ resource_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_1]) - self.assertRaises(exception.RegisteredLimitError, - PROVIDERS.unified_limit_api.delete_registered_limit, - registered_limit_1['id']) + self.assertRaises( + exception.RegisteredLimitError, + PROVIDERS.unified_limit_api.delete_registered_limit, + registered_limit_1['id'], + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_2]) + [registered_limit_2] + ) limit_2 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_2]) - self.assertRaises(exception.RegisteredLimitError, - PROVIDERS.unified_limit_api.delete_registered_limit, - registered_limit_2['id']) + self.assertRaises( + exception.RegisteredLimitError, + PROVIDERS.unified_limit_api.delete_registered_limit, + registered_limit_2['id'], + ) class LimitTests(object): def test_default_enforcement_model_is_flat(self): expected = { - 'description': ('Limit enforcement and validation does not take ' - 'project hierarchy into consideration.'), - 'name': 'flat' + 'description': ( + 'Limit enforcement and validation does not take ' + 'project hierarchy into consideration.' 
+ ), + 'name': 'flat', } self.assertEqual(expected, PROVIDERS.unified_limit_api.get_model()) def test_registering_unsupported_enforcement_model_fails(self): self.assertRaises( - ValueError, self.config_fixture.config, group='unified_limit', - enforcement_model=uuid.uuid4().hex + ValueError, + self.config_fixture.config, + group='unified_limit', + enforcement_model=uuid.uuid4().hex, ) def test_create_project_limit(self): @@ -414,9 +588,12 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, description='test description', - domain_id=None) + domain_id=None, + ) limits = PROVIDERS.unified_limit_api.create_limits([limit_1]) self.assertDictEqual(limit_1, limits[0]) @@ -425,14 +602,20 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=5, id=uuid.uuid4().hex, - domain_id=None) + resource_name='snapshot', + resource_limit=5, + id=uuid.uuid4().hex, + domain_id=None, + ) limit_3 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='backup', resource_limit=5, id=uuid.uuid4().hex, - domain_id=None) + resource_name='backup', + resource_limit=5, + id=uuid.uuid4().hex, + domain_id=None, + ) limits = PROVIDERS.unified_limit_api.create_limits([limit_2, limit_3]) for limit in limits: @@ -446,9 +629,12 @@ class LimitTests(object): project_id=None, service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, description='test description', - domain_id=self.domain_default['id']) + domain_id=self.domain_default['id'], + ) 
limits = PROVIDERS.unified_limit_api.create_limits([limit_1]) self.assertDictEqual(limit_1, limits[0]) @@ -457,7 +643,10 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_1]) # use different id but the same project_id, service_id and region_id @@ -465,18 +654,26 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) - self.assertRaises(exception.Conflict, - PROVIDERS.unified_limit_api.create_limits, - [limit_1]) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) + self.assertRaises( + exception.Conflict, + PROVIDERS.unified_limit_api.create_limits, + [limit_1], + ) def test_create_domain_limit_duplicate(self): limit_1 = unit.new_limit_ref( project_id=None, service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, - domain_id=self.domain_default['id']) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + domain_id=self.domain_default['id'], + ) PROVIDERS.unified_limit_api.create_limits([limit_1]) # use different id but the same domain_id, service_id and region_id @@ -484,49 +681,72 @@ class LimitTests(object): project_id=None, service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, - domain_id=self.domain_default['id']) - self.assertRaises(exception.Conflict, - PROVIDERS.unified_limit_api.create_limits, - [limit_1]) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + domain_id=self.domain_default['id'], + ) + self.assertRaises( + 
exception.Conflict, + PROVIDERS.unified_limit_api.create_limits, + [limit_1], + ) def test_create_limit_with_invalid_service_raises_validation_error(self): limit = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=uuid.uuid4().hex, region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) - self.assertRaises(exception.ValidationError, - PROVIDERS.unified_limit_api.create_limits, - [limit]) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) + self.assertRaises( + exception.ValidationError, + PROVIDERS.unified_limit_api.create_limits, + [limit], + ) def test_create_limit_with_invalid_region_raises_validation_error(self): limit = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=uuid.uuid4().hex, - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) - self.assertRaises(exception.ValidationError, - PROVIDERS.unified_limit_api.create_limits, - [limit]) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) + self.assertRaises( + exception.ValidationError, + PROVIDERS.unified_limit_api.create_limits, + [limit], + ) def test_create_limit_without_reference_registered_limit(self): limit_1 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) - self.assertRaises(exception.NoLimitReference, - PROVIDERS.unified_limit_api.create_limits, - [limit_1]) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) + self.assertRaises( + exception.NoLimitReference, + PROVIDERS.unified_limit_api.create_limits, + [limit_1], + ) def test_create_limit_description_none(self): limit = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, - 
description=None) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + description=None, + ) res = PROVIDERS.unified_limit_api.create_limits([limit]) self.assertIsNone(res[0]['description']) @@ -535,7 +755,10 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) limit.pop('description') res = PROVIDERS.unified_limit_api.create_limits([limit]) self.assertIsNone(res[0]['description']) @@ -546,25 +769,32 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) limit_2 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=5, id=uuid.uuid4().hex) + resource_name='snapshot', + resource_limit=5, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_1, limit_2]) expect_limit = 8 - limit_update = {'id': limit_1['id'], - 'resource_limit': expect_limit} - res = PROVIDERS.unified_limit_api.update_limit(limit_1['id'], - limit_update) + limit_update = {'id': limit_1['id'], 'resource_limit': expect_limit} + res = PROVIDERS.unified_limit_api.update_limit( + limit_1['id'], limit_update + ) self.assertEqual(expect_limit, res['resource_limit']) # 'id' can be omitted in the update body limit_update = {'resource_limit': expect_limit} - res = PROVIDERS.unified_limit_api.update_limit(limit_2['id'], - limit_update) + res = PROVIDERS.unified_limit_api.update_limit( + limit_2['id'], limit_update + ) self.assertEqual(expect_limit, res['resource_limit']) def test_list_limits(self): @@ -573,14 +803,20 @@ class 
LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, - domain_id=None) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + domain_id=None, + ) limit_2 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=5, id=uuid.uuid4().hex, - domain_id=None) + resource_name='snapshot', + resource_limit=5, + id=uuid.uuid4().hex, + domain_id=None, + ) PROVIDERS.unified_limit_api.create_limits([limit_1, limit_2]) # list @@ -601,14 +837,20 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, - domain_id=None) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + domain_id=None, + ) limit_2 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=5, id=uuid.uuid4().hex, - domain_id=None) + resource_name='snapshot', + resource_limit=5, + id=uuid.uuid4().hex, + domain_id=None, + ) PROVIDERS.unified_limit_api.create_limits([limit_1, limit_2]) # list, limit is 1 @@ -625,20 +867,29 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, - domain_id=None) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + domain_id=None, + ) limit_2 = unit.new_limit_ref( project_id=self.project_baz['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=10, id=uuid.uuid4().hex, - domain_id=None) + resource_name='snapshot', + resource_limit=10, + 
id=uuid.uuid4().hex, + domain_id=None, + ) limit_3 = unit.new_limit_ref( project_id=None, service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=10, id=uuid.uuid4().hex, - domain_id=self.domain_default['id']) + resource_name='snapshot', + resource_limit=10, + id=uuid.uuid4().hex, + domain_id=self.domain_default['id'], + ) PROVIDERS.unified_limit_api.create_limits([limit_1, limit_2, limit_3]) hints = driver_hints.Hints() @@ -672,12 +923,18 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) limit_2 = unit.new_limit_ref( project_id=self.project_baz['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=10, id=uuid.uuid4().hex) + resource_name='snapshot', + resource_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_1, limit_2]) hints = driver_hints.Hints() @@ -692,14 +949,20 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex, - domain_id=None) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + domain_id=None, + ) limit_2 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=5, id=uuid.uuid4().hex, - domain_id=None) + resource_name='snapshot', + resource_limit=5, + id=uuid.uuid4().hex, + domain_id=None, + ) PROVIDERS.unified_limit_api.create_limits([limit_1, limit_2]) # show one @@ -707,9 +970,11 @@ class LimitTests(object): self.assertDictEqual(limit_2, res) def test_get_limit_returns_not_found(self): - 
self.assertRaises(exception.LimitNotFound, - PROVIDERS.unified_limit_api.get_limit, - uuid.uuid4().hex) + self.assertRaises( + exception.LimitNotFound, + PROVIDERS.unified_limit_api.get_limit, + uuid.uuid4().hex, + ) def test_delete_limit(self): # create two limits @@ -717,24 +982,34 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) limit_2 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=5, id=uuid.uuid4().hex) + resource_name='snapshot', + resource_limit=5, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_1, limit_2]) # delete one PROVIDERS.unified_limit_api.delete_limit(limit_1['id']) # delete again - self.assertRaises(exception.LimitNotFound, - PROVIDERS.unified_limit_api.get_limit, - limit_1['id']) + self.assertRaises( + exception.LimitNotFound, + PROVIDERS.unified_limit_api.get_limit, + limit_1['id'], + ) def test_delete_limit_returns_not_found(self): - self.assertRaises(exception.LimitNotFound, - PROVIDERS.unified_limit_api.delete_limit, - uuid.uuid4().hex) + self.assertRaises( + exception.LimitNotFound, + PROVIDERS.unified_limit_api.delete_limit, + uuid.uuid4().hex, + ) def test_delete_limit_project(self): # create two limits @@ -742,12 +1017,18 @@ class LimitTests(object): project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', resource_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + resource_limit=10, + id=uuid.uuid4().hex, + ) limit_2 = unit.new_limit_ref( project_id=self.project_bar['id'], service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', resource_limit=5, 
id=uuid.uuid4().hex) + resource_name='snapshot', + resource_limit=5, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_limits([limit_1, limit_2]) # delete a unrelated project, the limits should still be there. diff --git a/keystone/tests/unit/mapping_fixtures.py b/keystone/tests/unit/mapping_fixtures.py index 7bf917407b..4beb7328c3 100644 --- a/keystone/tests/unit/mapping_fixtures.py +++ b/keystone/tests/unit/mapping_fixtures.py @@ -34,68 +34,32 @@ MAPPING_SMALL = { "rules": [ { "local": [ - { - "group": { - "id": EMPLOYEE_GROUP_ID - } - }, - { - "user": { - "name": "{0}" - } - } + {"group": {"id": EMPLOYEE_GROUP_ID}}, + {"user": {"name": "{0}"}}, ], "remote": [ - { - "type": "UserName" - }, + {"type": "UserName"}, { "type": "orgPersonType", - "not_any_of": [ - "Contractor", - "SubContractor" - ] + "not_any_of": ["Contractor", "SubContractor"], }, - { - "type": "LastName", - "any_one_of": [ - "Bo" - ] - } - ] + {"type": "LastName", "any_one_of": ["Bo"]}, + ], }, { "local": [ - { - "group": { - "id": CONTRACTOR_GROUP_ID - } - }, - { - "user": { - "name": "{0}" - } - } + {"group": {"id": CONTRACTOR_GROUP_ID}}, + {"user": {"name": "{0}"}}, ], "remote": [ - { - "type": "UserName" - }, + {"type": "UserName"}, { "type": "orgPersonType", - "any_one_of": [ - "Contractor", - "SubContractor" - ] + "any_one_of": ["Contractor", "SubContractor"], }, - { - "type": "FirstName", - "any_one_of": [ - "Jill" - ] - } - ] - } + {"type": "FirstName", "any_one_of": ["Jill"]}, + ], + }, ] } @@ -108,117 +72,60 @@ MAPPING_LARGE = { { "local": [ { - "user": { - "name": "{0} {1}", - "email": "{2}" - }, - "group": { - "id": EMPLOYEE_GROUP_ID - } + "user": {"name": "{0} {1}", "email": "{2}"}, + "group": {"id": EMPLOYEE_GROUP_ID}, } ], "remote": [ - { - "type": "FirstName" - }, - { - "type": "LastName" - }, - { - "type": "Email" - }, + {"type": "FirstName"}, + {"type": "LastName"}, + {"type": "Email"}, { "type": "orgPersonType", - "any_one_of": [ - "Admin", - "Big Cheese" - ] - } - ] 
+ "any_one_of": ["Admin", "Big Cheese"], + }, + ], }, { - "local": [ - { - "user": { - "name": "{0}", - "email": "{1}" - } - } - ], + "local": [{"user": {"name": "{0}", "email": "{1}"}}], "remote": [ - { - "type": "UserName" - }, - { - "type": "Email" - }, + {"type": "UserName"}, + {"type": "Email"}, { "type": "orgPersonType", "not_any_of": [ "Admin", "Employee", "Contractor", - "Tester" - ] - } - ] + "Tester", + ], + }, + ], }, { "local": [ - { - "group": { - "id": TESTER_GROUP_ID - } - }, - { - "group": { - "id": DEVELOPER_GROUP_ID - } - }, - { - "user": { - "name": "{0}" - } - } + {"group": {"id": TESTER_GROUP_ID}}, + {"group": {"id": DEVELOPER_GROUP_ID}}, + {"user": {"name": "{0}"}}, ], "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "any_one_of": [ - "Tester" - ] - }, + {"type": "UserName"}, + {"type": "orgPersonType", "any_one_of": ["Tester"]}, { "type": "Email", - "any_one_of": [ - ".*@example.com$" - ], - "regex": True - } - ] - } + "any_one_of": [".*@example.com$"], + "regex": True, + }, + ], + }, ] } MAPPING_BAD_REQ = { "rules": [ { - "local": [ - { - "user": "name" - } - ], - "remote": [ - { - "type": "UserName", - "bad_requirement": [ - "Young" - ] - } - ] + "local": [{"user": "name"}], + "remote": [{"type": "UserName", "bad_requirement": ["Young"]}], } ] } @@ -226,110 +133,44 @@ MAPPING_BAD_REQ = { MAPPING_BAD_VALUE = { "rules": [ { - "local": [ - { - "user": "name" - } - ], - "remote": [ - { - "type": "UserName", - "any_one_of": "should_be_list" - } - ] + "local": [{"user": "name"}], + "remote": [{"type": "UserName", "any_one_of": "should_be_list"}], } ] } -MAPPING_NO_RULES = { - 'rules': [] -} +MAPPING_NO_RULES = {'rules': []} -MAPPING_NO_REMOTE = { - "rules": [ - { - "local": [ - { - "user": "name" - } - ], - "remote": [] - } - ] -} +MAPPING_NO_REMOTE = {"rules": [{"local": [{"user": "name"}], "remote": []}]} MAPPING_MISSING_LOCAL = { "rules": [ - { - "remote": [ - { - "type": "UserName", - "any_one_of": "should_be_list" - 
} - ] - } + {"remote": [{"type": "UserName", "any_one_of": "should_be_list"}]} ] } MAPPING_WRONG_TYPE = { "rules": [ - { - "local": [ - { - "user": "{1}" - } - ], - "remote": [ - { - "not_type": "UserName" - } - ] - } + {"local": [{"user": "{1}"}], "remote": [{"not_type": "UserName"}]} ] } MAPPING_MISSING_TYPE = { - "rules": [ - { - "local": [ - { - "user": "{1}" - } - ], - "remote": [ - {} - ] - } - ] + "rules": [{"local": [{"user": "{1}"}], "remote": [{}]}] } MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF = { "rules": [ { - "local": [ - { - "group": { - "id": "0cd5e9" - } - }, - { - "user": { - "name": "{0}" - } - } - ], + "local": [{"group": {"id": "0cd5e9"}}, {"user": {"name": "{0}"}}], "remote": [ - { - "type": "UserName" - }, + {"type": "UserName"}, { "type": "orgPersonType", - "not_any_of": [ - "SubContractor" - ], - "invalid_type": "xyz" - } - ] + "not_any_of": ["SubContractor"], + "invalid_type": "xyz", + }, + ], } ] } @@ -337,30 +178,15 @@ MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF = { MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF = { "rules": [ { - "local": [ - { - "group": { - "id": "0cd5e9" - } - }, - { - "user": { - "name": "{0}" - } - } - ], + "local": [{"group": {"id": "0cd5e9"}}, {"user": {"name": "{0}"}}], "remote": [ - { - "type": "UserName" - }, + {"type": "UserName"}, { "type": "orgPersonType", - "any_one_of": [ - "SubContractor" - ], - "invalid_type": "xyz" - } - ] + "any_one_of": ["SubContractor"], + "invalid_type": "xyz", + }, + ], } ] } @@ -368,27 +194,11 @@ MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF = { MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE = { "rules": [ { - "local": [ - { - "group": { - "id": "0cd5e9" - } - }, - { - "user": { - "name": "{0}" - } - } - ], + "local": [{"group": {"id": "0cd5e9"}}, {"user": {"name": "{0}"}}], "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "invalid_type": "xyz" - } - ] + {"type": "UserName"}, + {"type": "orgPersonType", "invalid_type": "xyz"}, + ], } ] } @@ -396,32 +206,14 @@ 
MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE = { MAPPING_EXTRA_RULES_PROPS = { "rules": [ { - "local": [ - { - "group": { - "id": "0cd5e9" - } - }, - { - "user": { - "name": "{0}" - } - } - ], + "local": [{"group": {"id": "0cd5e9"}}, {"user": {"name": "{0}"}}], "invalid_type": { "id": "xyz", }, "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "not_any_of": [ - "SubContractor" - ] - } - ] + {"type": "UserName"}, + {"type": "orgPersonType", "not_any_of": ["SubContractor"]}, + ], } ] } @@ -436,30 +228,18 @@ MAPPING_TESTER_REGEX = { } } ], - "remote": [ - { - "type": "UserName" - } - ] + "remote": [{"type": "UserName"}], }, { - "local": [ - { - "group": { - "id": TESTER_GROUP_ID - } - } - ], + "local": [{"group": {"id": TESTER_GROUP_ID}}], "remote": [ { "type": "orgPersonType", - "any_one_of": [ - ".*Tester*" - ], - "regex": True + "any_one_of": [".*Tester*"], + "regex": True, } - ] - } + ], + }, ] } @@ -472,22 +252,10 @@ MAPPING_DIRECT_MAPPING_THROUGH_KEYWORD = { "user": { "name": "{0}", } - }, - { - "group": { - "id": TESTER_GROUP_ID - } - } + {"group": {"id": TESTER_GROUP_ID}}, ], - "remote": [ - { - "type": "UserName", - "any_one_of": [ - "bwilliams" - ] - } - ] + "remote": [{"type": "UserName", "any_one_of": ["bwilliams"]}], } ] } @@ -500,35 +268,26 @@ MAPPING_DEVELOPER_REGEX = { "user": { "name": "{0}", }, - "group": { - "id": DEVELOPER_GROUP_ID - } + "group": {"id": DEVELOPER_GROUP_ID}, } ], "remote": [ - { - "type": "UserName" - }, + {"type": "UserName"}, { "type": "orgPersonType", - "any_one_of": [ - "Developer" - ], + "any_one_of": ["Developer"], }, { "type": "Email", - "not_any_of": [ - ".*@example.org$" - ], - "regex": True - } - ] + "not_any_of": [".*@example.org$"], + "regex": True, + }, + ], } ] } MAPPING_GROUP_NAMES = { - "rules": [ { "local": [ @@ -538,57 +297,39 @@ MAPPING_GROUP_NAMES = { } } ], - "remote": [ - { - "type": "UserName" - } - ] + "remote": [{"type": "UserName"}], }, { "local": [ { "group": { "name": 
DEVELOPER_GROUP_NAME, - "domain": { - "name": DEVELOPER_GROUP_DOMAIN_NAME - } + "domain": {"name": DEVELOPER_GROUP_DOMAIN_NAME}, } } ], "remote": [ { "type": "orgPersonType", - "any_one_of": [ - "Employee" - ], + "any_one_of": ["Employee"], } - ] + ], }, { "local": [ { "group": { "name": TESTER_GROUP_NAME, - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } + "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}, } } ], - "remote": [ - { - "type": "orgPersonType", - "any_one_of": [ - "BuildingX" - ] - } - ] + "remote": [{"type": "orgPersonType", "any_one_of": ["BuildingX"]}], }, ] } MAPPING_GROUP_NAME_WITHOUT_DOMAIN = { - "rules": [ { "local": [ @@ -601,65 +342,49 @@ MAPPING_GROUP_NAME_WITHOUT_DOMAIN = { "remote": [ { "type": "orgPersonType", - "any_one_of": [ - "Employee" - ], + "any_one_of": ["Employee"], } - ] + ], }, ] } MAPPING_GROUP_ID_WITH_DOMAIN = { - "rules": [ { "local": [ { "group": { "id": EMPLOYEE_GROUP_ID, - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } + "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}, } } ], "remote": [ { "type": "orgPersonType", - "any_one_of": [ - "Employee" - ], + "any_one_of": ["Employee"], } - ] + ], }, ] } MAPPING_BAD_GROUP = { - "rules": [ { - "local": [ - { - "group": { - } - } - ], + "local": [{"group": {}}], "remote": [ { "type": "orgPersonType", - "any_one_of": [ - "Employee" - ], + "any_one_of": ["Employee"], } - ] + ], }, ] } MAPPING_BAD_DOMAIN = { - "rules": [ { "local": [ @@ -668,19 +393,17 @@ MAPPING_BAD_DOMAIN = { "id": EMPLOYEE_GROUP_ID, "domain": { "id": DEVELOPER_GROUP_DOMAIN_ID, - "badkey": "badvalue" - } + "badkey": "badvalue", + }, } } ], "remote": [ { "type": "orgPersonType", - "any_one_of": [ - "Employee" - ], + "any_one_of": ["Employee"], } - ] + ], }, ] } @@ -692,24 +415,15 @@ MAPPING_EPHEMERAL_USER = { { "user": { "name": "{0}", - "domain": { - "id": FEDERATED_DOMAIN - }, - "type": "ephemeral" + "domain": {"id": FEDERATED_DOMAIN}, + "type": "ephemeral", } } ], "remote": [ - { - "type": "UserName" - }, - { - "type": 
"UserName", - "any_one_of": [ - "tbo" - ] - } - ] + {"type": "UserName"}, + {"type": "UserName", "any_one_of": ["tbo"]}, + ], } ] } @@ -721,21 +435,15 @@ MAPPING_EPHEMERAL_USER_REMOTE_DOMAIN = { { "user": { "name": "{0}", - "domain": { - "name": "{1}" - }, - "type": "ephemeral" + "domain": {"name": "{1}"}, + "type": "ephemeral", } } ], "remote": [ - { - "type": "UserName" - }, - { - "type": "OIDC-openstack-user-domain" - }, - ] + {"type": "UserName"}, + {"type": "OIDC-openstack-user-domain"}, + ], } ] } @@ -746,27 +454,14 @@ MAPPING_GROUPS_WHITELIST = { "remote": [ { "type": "orgPersonType", - "whitelist": [ - "Developer", "Contractor" - ] + "whitelist": ["Developer", "Contractor"], }, - { - "type": "UserName" - } + {"type": "UserName"}, ], "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, - { - "user": { - "name": "{1}" - } - } - ] + {"groups": "{0}", "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}}, + {"user": {"name": "{1}"}}, + ], } ] } @@ -778,24 +473,15 @@ MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN = { { "user": { "name": "{0}", - "domain": { - "id": LOCAL_DOMAIN - }, - "type": "ephemeral" + "domain": {"id": LOCAL_DOMAIN}, + "type": "ephemeral", } } ], "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "jsmith" - ] - } - ] + {"type": "UserName"}, + {"type": "UserName", "any_one_of": ["jsmith"]}, + ], } ] } @@ -806,16 +492,14 @@ MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN = { "remote": [ { "type": "orgPersonType", - "whitelist": [ - "Developer", "Contractor" - ] + "whitelist": ["Developer", "Contractor"], }, ], "local": [ { "groups": "{0}", } - ] + ], } ] } @@ -827,24 +511,15 @@ MAPPING_LOCAL_USER_LOCAL_DOMAIN = { { "user": { "name": "{0}", - "domain": { - "id": LOCAL_DOMAIN - }, - "type": "local" + "domain": {"id": LOCAL_DOMAIN}, + "type": "local", } } ], "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "jsmith" - ] - } - ] + {"type": "UserName"}, + {"type": 
"UserName", "any_one_of": ["jsmith"]}, + ], } ] } @@ -855,30 +530,19 @@ MAPPING_GROUPS_BLACKLIST_MULTIPLES = { "remote": [ { "type": "orgPersonType", - "blacklist": [ - "Developer", "Manager" - ] - }, - { - "type": "Thing" # this could be variable length! - }, - { - "type": "UserName" + "blacklist": ["Developer", "Manager"], }, + {"type": "Thing"}, # this could be variable length! + {"type": "UserName"}, ], "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, + {"groups": "{0}", "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}}, { "user": { "name": "{2}", } - } - ] + }, + ], } ] } @@ -888,27 +552,14 @@ MAPPING_GROUPS_BLACKLIST = { "remote": [ { "type": "orgPersonType", - "blacklist": [ - "Developer", "Manager" - ] + "blacklist": ["Developer", "Manager"], }, - { - "type": "UserName" - } + {"type": "UserName"}, ], "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, - { - "user": { - "name": "{1}" - } - } - ] + {"groups": "{0}", "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}}, + {"user": {"name": "{1}"}}, + ], } ] } @@ -919,20 +570,13 @@ MAPPING_GROUPS_BLACKLIST_REGEX = { "remote": [ { "type": "orgPersonType", - "blacklist": [ - ".*Employee$" - ], - "regex": True + "blacklist": [".*Employee$"], + "regex": True, }, ], "local": [ - { - "groups": "{0}", - "domain": { - "id": FEDERATED_DOMAIN - } - }, - ] + {"groups": "{0}", "domain": {"id": FEDERATED_DOMAIN}}, + ], } ] } @@ -943,20 +587,13 @@ MAPPING_GROUPS_WHITELIST_REGEX = { "remote": [ { "type": "orgPersonType", - "whitelist": [ - ".*Employee$" - ], - "regex": True + "whitelist": [".*Employee$"], + "regex": True, }, ], "local": [ - { - "groups": "{0}", - "domain": { - "id": FEDERATED_DOMAIN - } - }, - ] + {"groups": "{0}", "domain": {"id": FEDERATED_DOMAIN}}, + ], } ] } @@ -966,24 +603,11 @@ MAPPING_GROUPS_WHITELIST_REGEX = { MAPPING_USER_IDS = { "rules": [ { - "local": [ - { - "user": { - "name": "{0}" - } - } - ], + "local": [{"user": {"name": 
"{0}"}}], "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "jsmith" - ] - } - ] + {"type": "UserName"}, + {"type": "UserName", "any_one_of": ["jsmith"]}, + ], }, { "local": [ @@ -991,43 +615,21 @@ MAPPING_USER_IDS = { "user": { "name": "{0}", "id": "abc123@example.com", - "domain": { - "id": "federated" - } + "domain": {"id": "federated"}, } } ], "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "tbo" - ] - } - ] + {"type": "UserName"}, + {"type": "UserName", "any_one_of": ["tbo"]}, + ], }, { - "local": [ - { - "user": { - "id": "{0}" - } - } - ], + "local": [{"user": {"id": "{0}"}}], "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "bob" - ] - } - ] + {"type": "UserName"}, + {"type": "UserName", "any_one_of": ["bob"]}, + ], }, { "local": [ @@ -1035,24 +637,15 @@ MAPPING_USER_IDS = { "user": { "id": "abc123@example.com", "name": "{0}", - "domain": { - "id": "federated" - } + "domain": {"id": "federated"}, } } ], "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "bwilliams" - ] - } - ] - } + {"type": "UserName"}, + {"type": "UserName", "any_one_of": ["bwilliams"]}, + ], + }, ] } @@ -1062,16 +655,14 @@ MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN = { "remote": [ { "type": "orgPersonType", - "blacklist": [ - "Developer", "Manager" - ] + "blacklist": ["Developer", "Manager"], }, ], "local": [ { "groups": "{0}", }, - ] + ], } ] } @@ -1082,22 +673,13 @@ MAPPING_GROUPS_WHITELIST_AND_BLACKLIST = { "remote": [ { "type": "orgPersonType", - "blacklist": [ - "Employee" - ], - "whitelist": [ - "Contractor" - ] + "blacklist": ["Employee"], + "whitelist": ["Contractor"], }, ], "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, - ] + {"groups": "{0}", "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}}, + ], } ] } @@ -1111,21 +693,15 @@ MAPPING_WITH_USERNAME_AND_DOMAINNAME = { { 'user': { 'name': 
'{0}', - 'domain': { - 'name': '{1}' - }, - 'type': 'local' + 'domain': {'name': '{1}'}, + 'type': 'local', } } ], 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - }, - { - 'type': 'SSL_CLIENT_DOMAIN_NAME' - } - ] + {'type': 'SSL_CLIENT_USER_NAME'}, + {'type': 'SSL_CLIENT_DOMAIN_NAME'}, + ], } ] } @@ -1139,21 +715,15 @@ MAPPING_WITH_USERID_AND_DOMAINNAME = { { 'user': { 'id': '{0}', - 'domain': { - 'name': '{1}' - }, - 'type': 'local' + 'domain': {'name': '{1}'}, + 'type': 'local', } } ], 'remote': [ - { - 'type': 'SSL_CLIENT_USER_ID' - }, - { - 'type': 'SSL_CLIENT_DOMAIN_NAME' - } - ] + {'type': 'SSL_CLIENT_USER_ID'}, + {'type': 'SSL_CLIENT_DOMAIN_NAME'}, + ], } ] } @@ -1167,21 +737,15 @@ MAPPING_WITH_USERNAME_AND_DOMAINID = { { 'user': { 'name': '{0}', - 'domain': { - 'id': '{1}' - }, - 'type': 'local' + 'domain': {'id': '{1}'}, + 'type': 'local', } } ], 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - }, - { - 'type': 'SSL_CLIENT_DOMAIN_ID' - } - ] + {'type': 'SSL_CLIENT_USER_NAME'}, + {'type': 'SSL_CLIENT_DOMAIN_ID'}, + ], } ] } @@ -1195,21 +759,15 @@ MAPPING_WITH_USERID_AND_DOMAINID = { { 'user': { 'id': '{0}', - 'domain': { - 'id': '{1}' - }, - 'type': 'local' + 'domain': {'id': '{1}'}, + 'type': 'local', } } ], 'remote': [ - { - 'type': 'SSL_CLIENT_USER_ID' - }, - { - 'type': 'SSL_CLIENT_DOMAIN_ID' - } - ] + {'type': 'SSL_CLIENT_USER_ID'}, + {'type': 'SSL_CLIENT_DOMAIN_ID'}, + ], } ] } @@ -1218,21 +776,8 @@ MAPPING_WITH_USERID_AND_DOMAINID = { MAPPING_WITH_DOMAINID_ONLY = { 'rules': [ { - 'local': [ - { - 'user': { - 'domain': { - 'id': '{0}' - }, - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_DOMAIN_ID' - } - ] + 'local': [{'user': {'domain': {'id': '{0}'}, 'type': 'local'}}], + 'remote': [{'type': 'SSL_CLIENT_DOMAIN_ID'}], } ] } @@ -1241,34 +786,18 @@ MAPPING_GROUPS_IDS_WHITELIST = { "rules": [ { "local": [ - { - "user": { - "name": "{0}" - } - }, - { - "group_ids": "{1}" - }, - { - "group": { - "id": "{2}" - } - } + {"user": 
{"name": "{0}"}}, + {"group_ids": "{1}"}, + {"group": {"id": "{2}"}}, ], "remote": [ - { - "type": "name" - }, + {"type": "name"}, { "type": "group_ids", - "whitelist": [ - "abc123", "ghi789", "321cba" - ] + "whitelist": ["abc123", "ghi789", "321cba"], }, - { - "type": "group" - } - ] + {"type": "group"}, + ], } ] } @@ -1277,34 +806,15 @@ MAPPING_GROUPS_IDS_BLACKLIST = { "rules": [ { "local": [ - { - "user": { - "name": "{0}" - } - }, - { - "group_ids": "{1}" - }, - { - "group": { - "id": "{2}" - } - } + {"user": {"name": "{0}"}}, + {"group_ids": "{1}"}, + {"group": {"id": "{2}"}}, ], "remote": [ - { - "type": "name" - }, - { - "type": "group_ids", - "blacklist": [ - "def456" - ] - }, - { - "type": "group" - } - ] + {"type": "name"}, + {"type": "group_ids", "blacklist": ["def456"]}, + {"type": "group"}, + ], } ] } @@ -1313,21 +823,8 @@ MAPPING_GROUPS_IDS_BLACKLIST = { MAPPING_WITH_DOMAINNAME_ONLY = { 'rules': [ { - 'local': [ - { - 'user': { - 'domain': { - 'name': '{0}' - }, - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_DOMAIN_NAME' - } - ] + 'local': [{'user': {'domain': {'name': '{0}'}, 'type': 'local'}}], + 'remote': [{'type': 'SSL_CLIENT_DOMAIN_NAME'}], } ] } @@ -1336,19 +833,8 @@ MAPPING_WITH_DOMAINNAME_ONLY = { MAPPING_WITH_USERNAME_ONLY = { 'rules': [ { - 'local': [ - { - 'user': { - 'name': '{0}', - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - } - ] + 'local': [{'user': {'name': '{0}', 'type': 'local'}}], + 'remote': [{'type': 'SSL_CLIENT_USER_NAME'}], } ] } @@ -1357,19 +843,8 @@ MAPPING_WITH_USERNAME_ONLY = { MAPPING_WITH_USERID_ONLY = { 'rules': [ { - 'local': [ - { - 'user': { - 'id': '{0}', - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_ID' - } - ] + 'local': [{'user': {'id': '{0}', 'type': 'local'}}], + 'remote': [{'type': 'SSL_CLIENT_USER_ID'}], } ] } @@ -1379,20 +854,11 @@ MAPPING_FOR_EPHEMERAL_USER = { { 'local': [ { - 'user': { - 'name': '{0}', - 'type': 
'ephemeral' - }, - 'group': { - 'id': 'dummy' - } + 'user': {'name': '{0}', 'type': 'ephemeral'}, + 'group': {'id': 'dummy'}, } ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - } - ] + 'remote': [{'type': 'SSL_CLIENT_USER_NAME'}], } ] } @@ -1402,23 +868,11 @@ MAPPING_FOR_EPHEMERAL_USER_AND_GROUP_DOMAIN_NAME = { { 'local': [ { - 'user': { - 'name': '{0}', - 'type': 'ephemeral' - }, - 'group': { - 'name': 'dummy', - 'domain': { - 'name': 'dummy' - } - } + 'user': {'name': '{0}', 'type': 'ephemeral'}, + 'group': {'name': 'dummy', 'domain': {'name': 'dummy'}}, } ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - } - ] + 'remote': [{'type': 'SSL_CLIENT_USER_NAME'}], } ] } @@ -1426,21 +880,8 @@ MAPPING_FOR_EPHEMERAL_USER_AND_GROUP_DOMAIN_NAME = { MAPPING_FOR_DEFAULT_EPHEMERAL_USER = { 'rules': [ { - 'local': [ - { - 'user': { - 'name': '{0}' - }, - 'group': { - 'id': 'dummy' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - } - ] + 'local': [{'user': {'name': '{0}'}, 'group': {'id': 'dummy'}}], + 'remote': [{'type': 'SSL_CLIENT_USER_NAME'}], } ] } @@ -1448,38 +889,22 @@ MAPPING_FOR_DEFAULT_EPHEMERAL_USER = { MAPPING_GROUPS_WHITELIST_PASS_THROUGH = { "rules": [ { - "remote": [ - { - "type": "UserName" - } - ], + "remote": [{"type": "UserName"}], "local": [ { "user": { "name": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } + "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}, } } - ] - }, - { - "remote": [ - { - "type": "orgPersonType", - "whitelist": ['Developer'] - } ], + }, + { + "remote": [{"type": "orgPersonType", "whitelist": ['Developer']}], "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - } - ] - } + {"groups": "{0}", "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}} + ], + }, ] } @@ -1488,18 +913,11 @@ MAPPING_BAD_LOCAL_SETUP = { { "local": [ { - "user": { - "name": "{0}", - "domain": {"id": "default"} - }, - "whatisthis": "local" + "user": {"name": "{0}", "domain": {"id": "default"}}, + 
"whatisthis": "local", } ], - "remote": [ - { - "type": "UserName" - } - ] + "remote": [{"type": "UserName"}], } ] } @@ -1507,29 +925,12 @@ MAPPING_BAD_LOCAL_SETUP = { MAPPING_BAD_LOCAL_TYPE_USER_IN_ASSERTION = { "rules": [ { - "local": [ - { - "user": { - "name": "{0}", - "groups": "{1}" - } - } - ], + "local": [{"user": {"name": "{0}", "groups": "{1}"}}], "remote": [ - { - "type": "openstack_user" - }, - { - "type": "openstack_groups" - - }, - { - "type": "openstack_roles", - "any_one_of": [ - "Admin" - ] - } - ] + {"type": "openstack_user"}, + {"type": "openstack_groups"}, + {"type": "openstack_roles", "any_one_of": ["Admin"]}, + ], }, ] } @@ -1544,24 +945,12 @@ MAPPING_GROUPS_WITH_EMAIL = { { "type": "userEmail", }, - { - "type": "UserName" - } + {"type": "UserName"}, ], "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, - { - "user": { - "name": "{2}", - "email": "{1}" - } - } - ] + {"groups": "{0}", "domain": {"id": DEVELOPER_GROUP_DOMAIN_ID}}, + {"user": {"name": "{2}", "email": "{1}"}}, + ], } ] } @@ -1570,27 +959,11 @@ MAPPING_GROUPS_WITH_EMAIL = { MAPPING_GROUPS_DOMAIN_OF_USER = { "rules": [ { - "local": - [ - { - "user": - { - "name": "{0}" - } - }, - { - "groups": "{1}" - } + "local": [{"user": {"name": "{0}"}}, {"groups": "{1}"}], + "remote": [ + {"type": "openstack_user"}, + {"type": "openstack_groups"}, ], - "remote": - [ - { - "type": "openstack_user" - }, - { - "type": "openstack_groups" - } - ] } ] } @@ -1600,7 +973,7 @@ EMPLOYEE_ASSERTION = { 'UserName': 'tbo', 'FirstName': 'Tim', 'LastName': 'Bo', - 'orgPersonType': 'Employee;BuildingX' + 'orgPersonType': 'Employee;BuildingX', } EMPLOYEE_PARTTIME_ASSERTION = { @@ -1608,7 +981,7 @@ EMPLOYEE_PARTTIME_ASSERTION = { 'UserName': 'tbo', 'FirstName': 'Tim', 'LastName': 'Bo', - 'orgPersonType': 'Employee;PartTimeEmployee;Manager' + 'orgPersonType': 'Employee;PartTimeEmployee;Manager', } EMPLOYEE_ASSERTION_MULTIPLE_GROUPS = { @@ -1617,7 +990,7 @@ 
EMPLOYEE_ASSERTION_MULTIPLE_GROUPS = { 'FirstName': 'Tim', 'LastName': 'Bo', 'orgPersonType': 'Developer;Manager;Contractor', - 'Thing': 'yes!;maybe!;no!!' + 'Thing': 'yes!;maybe!;no!!', } EMPLOYEE_ASSERTION_PREFIXED = { @@ -1625,7 +998,7 @@ EMPLOYEE_ASSERTION_PREFIXED = { 'PREFIX_UserName': 'tbo', 'PREFIX_FirstName': 'Tim', 'PREFIX_LastName': 'Bo', - 'PREFIX_orgPersonType': 'SuperEmployee;BuildingX' + 'PREFIX_orgPersonType': 'SuperEmployee;BuildingX', } CONTRACTOR_ASSERTION = { @@ -1633,7 +1006,7 @@ CONTRACTOR_ASSERTION = { 'UserName': 'jsmith', 'FirstName': 'Jill', 'LastName': 'Smith', - 'orgPersonType': 'Contractor;Non-Dev' + 'orgPersonType': 'Contractor;Non-Dev', } ADMIN_ASSERTION = { @@ -1641,7 +1014,7 @@ ADMIN_ASSERTION = { 'UserName': 'bob', 'FirstName': 'Bob', 'LastName': 'Thompson', - 'orgPersonType': 'Admin;Chief' + 'orgPersonType': 'Admin;Chief', } CUSTOMER_ASSERTION = { @@ -1649,7 +1022,7 @@ CUSTOMER_ASSERTION = { 'UserName': 'bwilliams', 'FirstName': 'Beth', 'LastName': 'Williams', - 'orgPersonType': 'Customer' + 'orgPersonType': 'Customer', } ANOTHER_CUSTOMER_ASSERTION = { @@ -1657,7 +1030,7 @@ ANOTHER_CUSTOMER_ASSERTION = { 'UserName': 'markcol', 'FirstName': 'Mark', 'LastName': 'Collins', - 'orgPersonType': 'Managers;CEO;CTO' + 'orgPersonType': 'Managers;CEO;CTO', } TESTER_ASSERTION = { @@ -1665,12 +1038,12 @@ TESTER_ASSERTION = { 'UserName': 'testacct', 'FirstName': 'Test', 'LastName': 'Account', - 'orgPersonType': 'MadeupGroup;Tester;GroupX' + 'orgPersonType': 'MadeupGroup;Tester;GroupX', } ANOTHER_TESTER_ASSERTION = { 'Email': 'testacct@example.com', - 'UserName': 'IamTester' + 'UserName': 'IamTester', } BAD_TESTER_ASSERTION = { @@ -1678,7 +1051,7 @@ BAD_TESTER_ASSERTION = { 'UserName': 'Evil', 'FirstName': 'Test', 'LastName': 'Account', - 'orgPersonType': 'Tester' + 'orgPersonType': 'Tester', } BAD_DEVELOPER_ASSERTION = { @@ -1686,7 +1059,7 @@ BAD_DEVELOPER_ASSERTION = { 'UserName': 'Evil', 'FirstName': 'Develop', 'LastName': 'Account', - 
'orgPersonType': 'Developer' + 'orgPersonType': 'Developer', } MALFORMED_TESTER_ASSERTION = { @@ -1697,7 +1070,7 @@ MALFORMED_TESTER_ASSERTION = { 'orgPersonType': 'Tester', 'object': object(), 'dictionary': dict(zip('teststring', range(10))), - 'tuple': tuple(range(5)) + 'tuple': tuple(range(5)), } DEVELOPER_ASSERTION = { @@ -1705,46 +1078,43 @@ DEVELOPER_ASSERTION = { 'UserName': 'developacct', 'FirstName': 'Develop', 'LastName': 'Account', - 'orgPersonType': 'Developer' + 'orgPersonType': 'Developer', } CONTRACTOR_MALFORMED_ASSERTION = { 'UserName': 'user', 'FirstName': object(), - 'orgPersonType': 'Contractor' + 'orgPersonType': 'Contractor', } -LOCAL_USER_ASSERTION = { - 'UserName': 'marek', - 'UserType': 'random' -} +LOCAL_USER_ASSERTION = {'UserName': 'marek', 'UserType': 'random'} ANOTHER_LOCAL_USER_ASSERTION = { 'UserName': 'marek', - 'Position': 'DirectorGeneral' + 'Position': 'DirectorGeneral', } USER_NO_GROUPS_ASSERTION = { 'Email': 'nogroupsuser1@example.org', 'UserName': 'nogroupsuser1', - 'orgPersonType': 'NoGroupsOrg' + 'orgPersonType': 'NoGroupsOrg', } UNMATCHED_GROUP_ASSERTION = { 'REMOTE_USER': 'Any Momoose', - 'REMOTE_USER_GROUPS': 'EXISTS;NO_EXISTS' + 'REMOTE_USER_GROUPS': 'EXISTS;NO_EXISTS', } GROUP_IDS_ASSERTION = { 'name': 'opilotte', 'group_ids': 'abc123;def456;ghi789', - 'group': 'klm012' + 'group': 'klm012', } GROUP_IDS_ASSERTION_ONLY_ONE_GROUP = { 'name': 'opilotte', 'group_ids': '321cba', - 'group': '210mlk' + 'group': '210mlk', } UNICODE_NAME_ASSERTION = { @@ -1752,19 +1122,19 @@ UNICODE_NAME_ASSERTION = { 'PFX_UserName': 'jonkare', 'PFX_FirstName': 'Jon Kåre', 'PFX_LastName': 'Hellån', - 'PFX_orgPersonType': 'Admin;Chief' + 'PFX_orgPersonType': 'Admin;Chief', } GROUPS_ASSERTION_ONLY_ONE_GROUP = { 'userEmail': 'jill@example.com', 'UserName': 'jsmith', - 'groups': 'ALL USERS' + 'groups': 'ALL USERS', } GROUPS_ASSERTION_ONLY_ONE_NUMERICAL_GROUP = { 'userEmail': 'jill@example.com', 'UserName': 'jsmith', - 'groups': '1234' + 'groups': 
'1234', } GROUPS_DOMAIN_ASSERTION = { @@ -1772,12 +1142,12 @@ GROUPS_DOMAIN_ASSERTION = { 'openstack_user_domain': 'default', 'openstack_roles': 'Admin', 'openstack_groups': 'JSON:{"name":"group1","domain":{"name":"xxx"}};' - 'JSON:{"name":"group2","domain":{"name":"yyy"}}' + 'JSON:{"name":"group2","domain":{"name":"yyy"}}', } USER_WITH_DOMAIN_ASSERTION = { 'UserName': 'marek', - 'OIDC-openstack-user-domain': 'user_domain' + 'OIDC-openstack-user-domain': 'user_domain', } MAPPING_UNICODE = { @@ -1785,33 +1155,19 @@ MAPPING_UNICODE = { { "local": [ { - "user": { - "name": "{0} {1}", - "email": "{2}" - }, - "group": { - "id": EMPLOYEE_GROUP_ID - } + "user": {"name": "{0} {1}", "email": "{2}"}, + "group": {"id": EMPLOYEE_GROUP_ID}, } ], "remote": [ - { - "type": "PFX_FirstName" - }, - { - "type": "PFX_LastName" - }, - { - "type": "PFX_Email" - }, + {"type": "PFX_FirstName"}, + {"type": "PFX_LastName"}, + {"type": "PFX_Email"}, { "type": "PFX_orgPersonType", - "any_one_of": [ - "Admin", - "Big Cheese" - ] - } - ] + "any_one_of": ["Admin", "Big Cheese"], + }, + ], }, ], } @@ -1820,36 +1176,28 @@ MAPPING_PROJECTS = { "rules": [ { "local": [ - { - "user": { - "name": "{0}" - } - }, + {"user": {"name": "{0}"}}, { "projects": [ - {"name": "Production", - "roles": [{"name": "observer"}]}, - {"name": "Staging", - "roles": [{"name": "member"}]}, - {"name": "Project for {0}", - "roles": [{"name": "admin"}]}, + { + "name": "Production", + "roles": [{"name": "observer"}], + }, + {"name": "Staging", "roles": [{"name": "member"}]}, + { + "name": "Project for {0}", + "roles": [{"name": "admin"}], + }, ], - } + }, ], "remote": [ - { - "type": "UserName" - }, + {"type": "UserName"}, { "type": "Email", }, - { - "type": "orgPersonType", - "any_one_of": [ - "Employee" - ] - } - ] + {"type": "orgPersonType", "any_one_of": ["Employee"]}, + ], } ] } @@ -1859,9 +1207,7 @@ MAPPING_PROJECTS_WITHOUT_ROLES = { { "local": [ { - "user": { - "name": "{0}" - }, + "user": {"name": "{0}"}, "projects": 
[ {"name": "a"}, {"name": "b"}, @@ -1869,11 +1215,7 @@ MAPPING_PROJECTS_WITHOUT_ROLES = { ], } ], - "remote": [ - { - "type": "UserName" - } - ] + "remote": [{"type": "UserName"}], }, ] } @@ -1883,23 +1225,18 @@ MAPPING_PROJECTS_WITHOUT_NAME = { { "local": [ { - "user": { - "name": "{0}" - }, + "user": {"name": "{0}"}, "projects": [ {"roles": [{"name": "observer"}]}, - {"name": "Staging", - "roles": [{"name": "member"}]}, - {"name": "Project for {0}", - "roles": [{"name": "admin"}]}, - ] + {"name": "Staging", "roles": [{"name": "member"}]}, + { + "name": "Project for {0}", + "roles": [{"name": "admin"}], + }, + ], } ], - "remote": [ - { - "type": "UserName" - } - ] + "remote": [{"type": "UserName"}], }, ] } diff --git a/keystone/tests/unit/policy/backends/test_base.py b/keystone/tests/unit/policy/backends/test_base.py index 94f07b050e..7d8d975c55 100644 --- a/keystone/tests/unit/policy/backends/test_base.py +++ b/keystone/tests/unit/policy/backends/test_base.py @@ -21,9 +21,11 @@ class DriverTestCase(object): def setUp(self): super(DriverTestCase, self).setUp() - self.policy = {'id': uuid.uuid4().hex, - 'blob': '{"identity:create_user": "role:domain_admin"}', - 'type': 'application/json'} + self.policy = { + 'id': uuid.uuid4().hex, + 'blob': '{"identity:create_user": "role:domain_admin"}', + 'type': 'application/json', + } self.driver.create_policy(self.policy['id'], self.policy) @property @@ -31,9 +33,11 @@ class DriverTestCase(object): raise exception.NotImplemented() def test_list_policies(self): - another_policy = {'id': uuid.uuid4().hex, - 'blob': '{"compute:create": "role:project_member"}', - 'type': 'application/json'} + another_policy = { + 'id': uuid.uuid4().hex, + 'blob': '{"compute:create": "role:project_member"}', + 'type': 'application/json', + } self.driver.create_policy(another_policy['id'], another_policy) policies = self.driver.list_policies() @@ -41,21 +45,25 @@ class DriverTestCase(object): self.assertCountEqual([self.policy, another_policy], 
policies) def test_get_policy(self): - self.assertEqual(self.policy, - self.driver.get_policy(self.policy['id'])) + self.assertEqual( + self.policy, self.driver.get_policy(self.policy['id']) + ) def test_update_policy(self): - self.policy['blob'] = ('{"identity:create_user": "role:domain_admin",' - '"identity:update_user": "role:domain_admin"}') + self.policy['blob'] = ( + '{"identity:create_user": "role:domain_admin",' + '"identity:update_user": "role:domain_admin"}' + ) self.driver.update_policy(self.policy['id'], self.policy) - self.assertEqual(self.policy, - self.driver.get_policy(self.policy['id'])) + self.assertEqual( + self.policy, self.driver.get_policy(self.policy['id']) + ) def test_delete_policy(self): self.driver.delete_policy(self.policy['id']) - self.assertRaises(exception.PolicyNotFound, - self.driver.get_policy, - self.policy['id']) + self.assertRaises( + exception.PolicyNotFound, self.driver.get_policy, self.policy['id'] + ) diff --git a/keystone/tests/unit/policy/backends/test_sql.py b/keystone/tests/unit/policy/backends/test_sql.py index 810096b0d4..ce29429193 100644 --- a/keystone/tests/unit/policy/backends/test_sql.py +++ b/keystone/tests/unit/policy/backends/test_sql.py @@ -22,10 +22,12 @@ class SQLModelTestCase(core_sql.BaseBackendSqlModels): """Test cases to validate the table structure.""" def test_policy_model(self): - cols = (('id', sql.String, 64), - ('blob', sql.JsonBlob, None), - ('type', sql.String, 255), - ('extra', sql.JsonBlob, None)) + cols = ( + ('id', sql.String, 64), + ('blob', sql.JsonBlob, None), + ('type', sql.String, 255), + ('extra', sql.JsonBlob, None), + ) self.assertExpectedSchema('policy', cols) diff --git a/keystone/tests/unit/policy/test_backends.py b/keystone/tests/unit/policy/test_backends.py index b9b778637f..b259dcaecb 100644 --- a/keystone/tests/unit/policy/test_backends.py +++ b/keystone/tests/unit/policy/test_backends.py @@ -48,10 +48,12 @@ class PolicyTests(object): ref = unit.new_policy_ref() # (cannot 
change policy ID) - self.assertRaises(exception.ValidationError, - PROVIDERS.policy_api.update_policy, - orig['id'], - ref) + self.assertRaises( + exception.ValidationError, + PROVIDERS.policy_api.update_policy, + orig['id'], + ref, + ) ref['id'] = orig['id'] res = PROVIDERS.policy_api.update_policy(orig['id'], ref) @@ -62,28 +64,38 @@ class PolicyTests(object): PROVIDERS.policy_api.create_policy(ref['id'], ref) PROVIDERS.policy_api.delete_policy(ref['id']) - self.assertRaises(exception.PolicyNotFound, - PROVIDERS.policy_api.delete_policy, - ref['id']) - self.assertRaises(exception.PolicyNotFound, - PROVIDERS.policy_api.get_policy, - ref['id']) + self.assertRaises( + exception.PolicyNotFound, + PROVIDERS.policy_api.delete_policy, + ref['id'], + ) + self.assertRaises( + exception.PolicyNotFound, + PROVIDERS.policy_api.get_policy, + ref['id'], + ) res = PROVIDERS.policy_api.list_policies() self.assertFalse(len([x for x in res if x['id'] == ref['id']])) def test_get_policy_returns_not_found(self): - self.assertRaises(exception.PolicyNotFound, - PROVIDERS.policy_api.get_policy, - uuid.uuid4().hex) + self.assertRaises( + exception.PolicyNotFound, + PROVIDERS.policy_api.get_policy, + uuid.uuid4().hex, + ) def test_update_policy_returns_not_found(self): ref = unit.new_policy_ref() - self.assertRaises(exception.PolicyNotFound, - PROVIDERS.policy_api.update_policy, - ref['id'], - ref) + self.assertRaises( + exception.PolicyNotFound, + PROVIDERS.policy_api.update_policy, + ref['id'], + ref, + ) def test_delete_policy_returns_not_found(self): - self.assertRaises(exception.PolicyNotFound, - PROVIDERS.policy_api.delete_policy, - uuid.uuid4().hex) + self.assertRaises( + exception.PolicyNotFound, + PROVIDERS.policy_api.delete_policy, + uuid.uuid4().hex, + ) diff --git a/keystone/tests/unit/receipt/test_fernet_provider.py b/keystone/tests/unit/receipt/test_fernet_provider.py index e1a966e199..3deba23ab5 100644 --- a/keystone/tests/unit/receipt/test_fernet_provider.py +++ 
b/keystone/tests/unit/receipt/test_fernet_provider.py @@ -48,7 +48,8 @@ class TestFernetReceiptProvider(unit.TestCase): e = self.assertRaises( exception.ReceiptNotFound, self.provider.validate_receipt, - receipt_id) + receipt_id, + ) self.assertIn(receipt_id, u'%s' % e) @@ -58,11 +59,13 @@ class TestValidate(unit.TestCase): self.useFixture(database.Database()) self.useFixture( ksfixtures.ConfigAuthPlugins( - self.config_fixture, - ['totp', 'token', 'password'])) + self.config_fixture, ['totp', 'token', 'password'] + ) + ) self.load_backends() PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) def config_overrides(self): super(TestValidate, self).config_overrides() @@ -89,18 +92,18 @@ class TestValidate(unit.TestCase): PROVIDERS.identity_api.update_user(user_ref['id'], user_ref) method_names = ['password'] - receipt = PROVIDERS.receipt_provider_api.\ - issue_receipt(user_ref['id'], method_names) + receipt = PROVIDERS.receipt_provider_api.issue_receipt( + user_ref['id'], method_names + ) - receipt = PROVIDERS.receipt_provider_api.validate_receipt( - receipt.id) + receipt = PROVIDERS.receipt_provider_api.validate_receipt(receipt.id) self.assertIsInstance(receipt.expires_at, str) self.assertIsInstance(receipt.issued_at, str) self.assertEqual(set(method_names), set(receipt.methods)) self.assertEqual( set(frozenset(r) for r in rule_list), - set(frozenset(r) for r in - receipt.required_methods)) + set(frozenset(r) for r in receipt.required_methods), + ) self.assertEqual(user_ref['id'], receipt.user_id) def test_validate_v3_receipt_validation_error_exc(self): @@ -111,7 +114,7 @@ class TestValidate(unit.TestCase): self.assertRaises( exception.ReceiptNotFound, PROVIDERS.receipt_provider_api.validate_receipt, - receipt_id + receipt_id, ) @@ -131,7 +134,8 @@ class TestReceiptFormatter(unit.TestCase): 
self.assertFalse(encoded_str_without_padding.endswith('=')) encoded_str_with_padding_restored = ( receipt_formatters.ReceiptFormatter.restore_padding( - encoded_str_without_padding) + encoded_str_without_padding + ) ) self.assertEqual(encoded_string, encoded_str_with_padding_restored) @@ -142,7 +146,9 @@ class TestPayloads(unit.TestCase): super(TestPayloads, self).setUp() self.useFixture( ksfixtures.ConfigAuthPlugins( - self.config_fixture, ['totp', 'token', 'password'])) + self.config_fixture, ['totp', 'token', 'password'] + ) + ) def assertTimestampsEqual(self, expected, actual): # The timestamp that we get back when parsing the payload may not @@ -155,8 +161,9 @@ class TestPayloads(unit.TestCase): # the granularity of timestamp string is microseconds and it's only the # last digit in the representation that's different, so use a delta # just above nanoseconds. - return self.assertCloseEnoughForGovernmentWork(exp_time, actual_time, - delta=1e-05) + return self.assertCloseEnoughForGovernmentWork( + exp_time, actual_time, delta=1e-05 + ) def test_strings_can_be_converted_to_bytes(self): s = token_provider.random_urlsafe_str() @@ -172,10 +179,12 @@ class TestPayloads(unit.TestCase): uuid_obj = uuid.UUID(expected_hex_uuid) expected_uuid_in_bytes = uuid_obj.bytes actual_uuid_in_bytes = payload_cls.convert_uuid_hex_to_bytes( - expected_hex_uuid) + expected_hex_uuid + ) self.assertEqual(expected_uuid_in_bytes, actual_uuid_in_bytes) actual_hex_uuid = payload_cls.convert_uuid_bytes_to_hex( - expected_uuid_in_bytes) + expected_uuid_in_bytes + ) self.assertEqual(expected_hex_uuid, actual_hex_uuid) def test_time_string_to_float_conversions(self): @@ -184,8 +193,9 @@ class TestPayloads(unit.TestCase): original_time_str = utils.isotime(subsecond=True) time_obj = timeutils.parse_isotime(original_time_str) expected_time_float = ( - (timeutils.normalize_time(time_obj) - - datetime.datetime.utcfromtimestamp(0)).total_seconds()) + timeutils.normalize_time(time_obj) + - 
datetime.datetime.utcfromtimestamp(0) + ).total_seconds() # NOTE(lbragstad): The receipt expiration time for Fernet receipts is # passed in the payload of the receipt. This is different from the @@ -194,7 +204,8 @@ class TestPayloads(unit.TestCase): self.assertIsInstance(expected_time_float, float) actual_time_float = payload_cls._convert_time_string_to_float( - original_time_str) + original_time_str + ) self.assertIsInstance(actual_time_float, float) self.assertEqual(expected_time_float, actual_time_float) @@ -205,7 +216,8 @@ class TestPayloads(unit.TestCase): expected_time_str = utils.isotime(time_object, subsecond=True) actual_time_str = payload_cls._convert_float_to_time_string( - actual_time_float) + actual_time_float + ) self.assertEqual(expected_time_str, actual_time_str) def _test_payload(self, payload_class, exp_user_id=None, exp_methods=None): @@ -214,7 +226,8 @@ class TestPayloads(unit.TestCase): exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True) payload = payload_class.assemble( - exp_user_id, exp_methods, exp_expires_at) + exp_user_id, exp_methods, exp_expires_at + ) (user_id, methods, expires_at) = payload_class.disassemble(payload) @@ -227,8 +240,8 @@ class TestPayloads(unit.TestCase): def test_payload_multiple_methods(self): self._test_payload( - receipt_formatters.ReceiptPayload, - exp_methods=['password', 'totp']) + receipt_formatters.ReceiptPayload, exp_methods=['password', 'totp'] + ) class TestFernetKeyRotation(unit.TestCase): @@ -243,7 +256,8 @@ class TestFernetKeyRotation(unit.TestCase): def keys(self): """Key files converted to numbers.""" return sorted( - int(x) for x in os.listdir(CONF.fernet_receipts.key_repository)) + int(x) for x in os.listdir(CONF.fernet_receipts.key_repository) + ) @property def key_repository_size(self): @@ -266,7 +280,7 @@ class TestFernetKeyRotation(unit.TestCase): key_utils = fernet_utils.FernetUtils( CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 
'fernet_receipts', ) keys = key_utils.load_keys() @@ -307,8 +321,9 @@ class TestFernetKeyRotation(unit.TestCase): # Simulate every rotation strategy up to "rotating once a week while # maintaining a year's worth of keys." for max_active_keys in range(min_active_keys, 52 + 1): - self.config_fixture.config(group='fernet_receipts', - max_active_keys=max_active_keys) + self.config_fixture.config( + group='fernet_receipts', max_active_keys=max_active_keys + ) # Ensure that resetting the key repository always results in 2 # active keys. @@ -316,7 +331,7 @@ class TestFernetKeyRotation(unit.TestCase): ksfixtures.KeyRepository( self.config_fixture, 'fernet_receipts', - CONF.fernet_receipts.max_active_keys + CONF.fernet_receipts.max_active_keys, ) ) @@ -334,7 +349,7 @@ class TestFernetKeyRotation(unit.TestCase): key_utils = fernet_utils.FernetUtils( CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 'fernet_receipts', ) for rotation in range(max_active_keys - min_active_keys): key_utils.rotate_keys() @@ -352,7 +367,7 @@ class TestFernetKeyRotation(unit.TestCase): key_utils = fernet_utils.FernetUtils( CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 'fernet_receipts', ) for rotation in range(10): key_utils.rotate_keys() @@ -370,7 +385,7 @@ class TestFernetKeyRotation(unit.TestCase): key_utils = fernet_utils.FernetUtils( CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 'fernet_receipts', ) # Simulate the disk full situation @@ -407,7 +422,7 @@ class TestFernetKeyRotation(unit.TestCase): key_utils = fernet_utils.FernetUtils( CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 'fernet_receipts', ) # Rotate the keys to overwrite the empty file key_utils.rotate_keys() @@ -423,7 +438,7 @@ class TestFernetKeyRotation(unit.TestCase): key_utils = fernet_utils.FernetUtils( 
CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 'fernet_receipts', ) key_utils.rotate_keys() self.assertTrue(os.path.isfile(evil_file)) @@ -450,7 +465,7 @@ class TestLoadKeys(unit.TestCase): key_utils = fernet_utils.FernetUtils( CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 'fernet_receipts', ) keys = key_utils.load_keys() self.assertEqual(2, len(keys)) @@ -463,7 +478,7 @@ class TestLoadKeys(unit.TestCase): key_utils = fernet_utils.FernetUtils( CONF.fernet_receipts.key_repository, CONF.fernet_receipts.max_active_keys, - 'fernet_receipts' + 'fernet_receipts', ) keys = key_utils.load_keys() self.assertEqual(2, len(keys)) diff --git a/keystone/tests/unit/receipt/test_receipt_serialization.py b/keystone/tests/unit/receipt/test_receipt_serialization.py index 8053890b17..c6e2ffef63 100644 --- a/keystone/tests/unit/receipt/test_receipt_serialization.py +++ b/keystone/tests/unit/receipt/test_receipt_serialization.py @@ -50,11 +50,12 @@ class TestReceiptSerialization(base_classes.TestCaseWithBootstrap): self.assertEqual(self.exp_receipt.issued_at, receipt.issued_at) @mock.patch.object( - receipt_model.ReceiptModel, '__init__', side_effect=Exception) + receipt_model.ReceiptModel, '__init__', side_effect=Exception + ) def test_error_handling_in_deserialize(self, handler_mock): serialized = self.receipt_handler.serialize(self.exp_receipt) self.assertRaises( exception.CacheDeserializationError, self.receipt_handler.deserialize, - serialized + serialized, ) diff --git a/keystone/tests/unit/resource/backends/test_sql.py b/keystone/tests/unit/resource/backends/test_sql.py index 163c1734b1..a31d246b64 100644 --- a/keystone/tests/unit/resource/backends/test_sql.py +++ b/keystone/tests/unit/resource/backends/test_sql.py @@ -17,8 +17,9 @@ from keystone.tests.unit.ksfixtures import database from keystone.tests.unit.resource import test_backends -class 
TestSqlResourceDriver(unit.BaseTestCase, - test_backends.ResourceDriverTests): +class TestSqlResourceDriver( + unit.BaseTestCase, test_backends.ResourceDriverTests +): def setUp(self): super(TestSqlResourceDriver, self).setUp() self.useFixture(database.Database()) @@ -26,5 +27,4 @@ class TestSqlResourceDriver(unit.BaseTestCase, root_domain = default_fixtures.ROOT_DOMAIN root_domain['domain_id'] = root_domain['id'] root_domain['is_domain'] = True - self.driver.create_project(root_domain['id'], - root_domain) + self.driver.create_project(root_domain['id'], root_domain) diff --git a/keystone/tests/unit/resource/config_backends/test_sql.py b/keystone/tests/unit/resource/config_backends/test_sql.py index b4c5f262f0..ba3fea5d04 100644 --- a/keystone/tests/unit/resource/config_backends/test_sql.py +++ b/keystone/tests/unit/resource/config_backends/test_sql.py @@ -22,30 +22,36 @@ from keystone.tests.unit.resource import test_core class SqlDomainConfigModels(core_sql.BaseBackendSqlModels): def test_whitelisted_model(self): - cols = (('domain_id', sql.String, 64), - ('group', sql.String, 255), - ('option', sql.String, 255), - ('value', sql.JsonBlob, None)) + cols = ( + ('domain_id', sql.String, 64), + ('group', sql.String, 255), + ('option', sql.String, 255), + ('value', sql.JsonBlob, None), + ) self.assertExpectedSchema('whitelisted_config', cols) def test_sensitive_model(self): - cols = (('domain_id', sql.String, 64), - ('group', sql.String, 255), - ('option', sql.String, 255), - ('value', sql.JsonBlob, None)) + cols = ( + ('domain_id', sql.String, 64), + ('group', sql.String, 255), + ('option', sql.String, 255), + ('value', sql.JsonBlob, None), + ) self.assertExpectedSchema('sensitive_config', cols) -class SqlDomainConfigDriver(unit.BaseTestCase, - test_core.DomainConfigDriverTests): +class SqlDomainConfigDriver( + unit.BaseTestCase, test_core.DomainConfigDriverTests +): def setUp(self): super(SqlDomainConfigDriver, self).setUp() self.useFixture(database.Database()) 
self.driver = config_sql.DomainConfig() -class SqlDomainConfig(core_sql.BaseBackendSqlTests, - test_core.DomainConfigTests): +class SqlDomainConfig( + core_sql.BaseBackendSqlTests, test_core.DomainConfigTests +): def setUp(self): super(SqlDomainConfig, self).setUp() # test_core.DomainConfigTests is effectively a mixin class, so make diff --git a/keystone/tests/unit/resource/test_backends.py b/keystone/tests/unit/resource/test_backends.py index 18935ccdc3..a377d140cc 100644 --- a/keystone/tests/unit/resource/test_backends.py +++ b/keystone/tests/unit/resource/test_backends.py @@ -37,79 +37,94 @@ class ResourceTests(object): def test_get_project(self): project_ref = PROVIDERS.resource_api.get_project( - self.project_bar['id']) + self.project_bar['id'] + ) self.assertDictEqual(self.project_bar, project_ref) def test_get_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - uuid.uuid4().hex) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + uuid.uuid4().hex, + ) def test_get_project_by_name(self): project_ref = PROVIDERS.resource_api.get_project_by_name( - self.project_bar['name'], - CONF.identity.default_domain_id) + self.project_bar['name'], CONF.identity.default_domain_id + ) self.assertDictEqual(self.project_bar, project_ref) @unit.skip_if_no_multiple_domains_support def test_get_project_by_name_for_project_acting_as_a_domain(self): """Test get_project_by_name works when the domain_id is None.""" project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, is_domain=False) + domain_id=CONF.identity.default_domain_id, is_domain=False + ) project = PROVIDERS.resource_api.create_project(project['id'], project) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project_by_name, - project['name'], - None) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project_by_name, + project['name'], + None, 
+ ) # Test that querying with domain_id as None will find the project # acting as a domain, even if it's name is the same as the regular # project above. - project2 = unit.new_project_ref(is_domain=True, - name=project['name']) + project2 = unit.new_project_ref(is_domain=True, name=project['name']) project2 = PROVIDERS.resource_api.create_project( project2['id'], project2 ) project_ref = PROVIDERS.resource_api.get_project_by_name( - project2['name'], None) + project2['name'], None + ) self.assertEqual(project2, project_ref) def test_get_project_by_name_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project_by_name, - uuid.uuid4().hex, - CONF.identity.default_domain_id) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project_by_name, + uuid.uuid4().hex, + CONF.identity.default_domain_id, + ) def test_create_duplicate_project_id_fails(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project_id = project['id'] PROVIDERS.resource_api.create_project(project_id, project) project['name'] = 'fake2' - self.assertRaises(exception.Conflict, - PROVIDERS.resource_api.create_project, - project_id, - project) + self.assertRaises( + exception.Conflict, + PROVIDERS.resource_api.create_project, + project_id, + project, + ) def test_create_duplicate_project_name_fails(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project_id = project['id'] PROVIDERS.resource_api.create_project(project_id, project) project['id'] = 'fake2' - self.assertRaises(exception.Conflict, - PROVIDERS.resource_api.create_project, - project['id'], - project) + self.assertRaises( + exception.Conflict, + PROVIDERS.resource_api.create_project, + project['id'], + project, + ) def test_create_project_name_with_trailing_whitespace(self): project = unit.new_project_ref( - 
domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project_id = project['id'] - project_name = project['name'] = (project['name'] + ' ') + project_name = project['name'] = project['name'] + ' ' project_returned = PROVIDERS.resource_api.create_project( project_id, project ) @@ -120,47 +135,57 @@ class ResourceTests(object): new_domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain) project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project2 = unit.new_project_ref(name=project1['name'], - domain_id=new_domain['id']) + domain_id=CONF.identity.default_domain_id + ) + project2 = unit.new_project_ref( + name=project1['name'], domain_id=new_domain['id'] + ) PROVIDERS.resource_api.create_project(project1['id'], project1) PROVIDERS.resource_api.create_project(project2['id'], project2) def test_rename_duplicate_project_name_fails(self): project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project2 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project1['id'], project1) PROVIDERS.resource_api.create_project(project2['id'], project2) project2['name'] = project1['name'] - self.assertRaises(exception.Error, - PROVIDERS.resource_api.update_project, - project2['id'], - project2) + self.assertRaises( + exception.Error, + PROVIDERS.resource_api.update_project, + project2['id'], + project2, + ) def test_update_project_id_does_nothing(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project_id = project['id'] PROVIDERS.resource_api.create_project(project['id'], project) project['id'] = 'fake2' PROVIDERS.resource_api.update_project(project_id, project) project_ref = PROVIDERS.resource_api.get_project(project_id) 
self.assertEqual(project_id, project_ref['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - 'fake2') + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + 'fake2', + ) def test_update_project_name_with_trailing_whitespace(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project_id = project['id'] project_create = PROVIDERS.resource_api.create_project( project_id, project ) self.assertEqual(project_id, project_create['id']) - project_name = project['name'] = (project['name'] + ' ') + project_name = project['name'] = project['name'] + ' ' project_update = PROVIDERS.resource_api.update_project( project_id, project ) @@ -172,21 +197,26 @@ class ResourceTests(object): pass def test_update_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.update_project, - uuid.uuid4().hex, - dict()) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.update_project, + uuid.uuid4().hex, + dict(), + ) def test_delete_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.delete_project, - uuid.uuid4().hex) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.delete_project, + uuid.uuid4().hex, + ) def test_create_update_delete_unicode_project(self): unicode_project_name = u'name \u540d\u5b57' project = unit.new_project_ref( name=unicode_project_name, - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id, + ) project = PROVIDERS.resource_api.create_project(project['id'], project) PROVIDERS.resource_api.update_project(project['id'], project) PROVIDERS.resource_api.delete_project(project['id']) @@ -201,18 +231,23 @@ class ResourceTests(object): def test_create_project_long_name_fails(self): project = unit.new_project_ref( - name='a' * 65, 
domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.create_project, - project['id'], - project) + name='a' * 65, domain_id=CONF.identity.default_domain_id + ) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.create_project, + project['id'], + project, + ) def test_create_project_invalid_domain_id(self): project = unit.new_project_ref(domain_id=uuid.uuid4().hex) - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.create_project, - project['id'], - project) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.create_project, + project['id'], + project, + ) def test_list_domains(self): domain1 = unit.new_domain_ref() @@ -238,7 +273,8 @@ class ResourceTests(object): def test_list_projects_with_multiple_filters(self): # Create a project project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project = PROVIDERS.resource_api.create_project(project['id'], project) # Build driver hints with the project's name and inexistent description @@ -263,14 +299,18 @@ class ResourceTests(object): self.assertEqual(project, projects[0]) def test_list_projects_for_domain(self): - project_ids = ([x['id'] for x in - PROVIDERS.resource_api.list_projects_in_domain( - CONF.identity.default_domain_id)]) + project_ids = [ + x['id'] + for x in PROVIDERS.resource_api.list_projects_in_domain( + CONF.identity.default_domain_id + ) + ] # Only the projects from the default fixtures are expected, since # filtering by domain does not include any project that acts as a # domain. 
self.assertThat( - project_ids, matchers.HasLength(len(default_fixtures.PROJECTS))) + project_ids, matchers.HasLength(len(default_fixtures.PROJECTS)) + ) self.assertIn(self.project_bar['id'], project_ids) self.assertIn(self.project_baz['id'], project_ids) self.assertIn(self.project_mtu['id'], project_ids) @@ -294,8 +334,9 @@ class ResourceTests(object): self._create_projects_hierarchy(hierarchy_size=2) projects = PROVIDERS.resource_api.list_projects_acting_as_domain() - expected_number_projects = ( - len(initial_domains) + len(new_projects_acting_as_domains)) + expected_number_projects = len(initial_domains) + len( + new_projects_acting_as_domains + ) self.assertEqual(expected_number_projects, len(projects)) for project in new_projects_acting_as_domains: self.assertIn(project, projects) @@ -310,17 +351,23 @@ class ResourceTests(object): PROVIDERS.resource_api.create_project(project1['id'], project1) project2 = unit.new_project_ref(domain_id=domain1['id']) PROVIDERS.resource_api.create_project(project2['id'], project2) - project_ids = ([x['id'] for x in - PROVIDERS.resource_api.list_projects_in_domain( - domain1['id'])]) + project_ids = [ + x['id'] + for x in PROVIDERS.resource_api.list_projects_in_domain( + domain1['id'] + ) + ] self.assertEqual(2, len(project_ids)) self.assertIn(project1['id'], project_ids) self.assertIn(project2['id'], project_ids) - def _create_projects_hierarchy(self, hierarchy_size=2, - domain_id=None, - is_domain=False, - parent_project_id=None): + def _create_projects_hierarchy( + self, + hierarchy_size=2, + domain_id=None, + is_domain=False, + parent_project_id=None, + ): """Create a project hierarchy with specified size. 
:param hierarchy_size: the desired hierarchy size, default is 2 - @@ -338,19 +385,23 @@ class ResourceTests(object): if domain_id is None: domain_id = CONF.identity.default_domain_id if parent_project_id: - project = unit.new_project_ref(parent_id=parent_project_id, - domain_id=domain_id, - is_domain=is_domain) + project = unit.new_project_ref( + parent_id=parent_project_id, + domain_id=domain_id, + is_domain=is_domain, + ) else: - project = unit.new_project_ref(domain_id=domain_id, - is_domain=is_domain) + project = unit.new_project_ref( + domain_id=domain_id, is_domain=is_domain + ) project_id = project['id'] project = PROVIDERS.resource_api.create_project(project_id, project) projects = [project] for i in range(1, hierarchy_size): - new_project = unit.new_project_ref(parent_id=project_id, - domain_id=domain_id) + new_project = unit.new_project_ref( + parent_id=project_id, domain_id=domain_id + ) PROVIDERS.resource_api.create_project( new_project['id'], new_project @@ -389,17 +440,21 @@ class ResourceTests(object): new_project = project.copy() new_project['id'] = uuid.uuid4().hex - self.assertRaises(exception.Conflict, - PROVIDERS.resource_api.create_project, - new_project['id'], - new_project) + self.assertRaises( + exception.Conflict, + PROVIDERS.resource_api.create_project, + new_project['id'], + new_project, + ) # We also should not be able to update one to have a name clash project2['name'] = project['name'] - self.assertRaises(exception.Conflict, - PROVIDERS.resource_api.update_project, - project2['id'], - project2) + self.assertRaises( + exception.Conflict, + PROVIDERS.resource_api.update_project, + project2['id'], + project2, + ) # But updating it to a unique name is OK project2['name'] = uuid.uuid4().hex @@ -408,7 +463,8 @@ class ResourceTests(object): # Finally, it should be OK to create a project with same name as one of # these acting as a domain, as long as it is a regular project project3 = unit.new_project_ref( - 
domain_id=CONF.identity.default_domain_id, name=project2['name']) + domain_id=CONF.identity.default_domain_id, name=project2['name'] + ) PROVIDERS.resource_api.create_project(project3['id'], project3) # In fact, it should be OK to create such a project in the domain which # has the matching name. @@ -420,12 +476,13 @@ class ResourceTests(object): @test_utils.wip('waiting for sub projects acting as domains support') def test_is_domain_sub_project_has_parent_domain_id(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, is_domain=True) + domain_id=CONF.identity.default_domain_id, is_domain=True + ) PROVIDERS.resource_api.create_project(project['id'], project) - sub_project = unit.new_project_ref(domain_id=project['id'], - parent_id=project['id'], - is_domain=True) + sub_project = unit.new_project_ref( + domain_id=project['id'], parent_id=project['id'], is_domain=True + ) ref = PROVIDERS.resource_api.create_project( sub_project['id'], sub_project @@ -436,17 +493,18 @@ class ResourceTests(object): @unit.skip_if_no_multiple_domains_support def test_delete_domain_with_project_api(self): - project = unit.new_project_ref(domain_id=None, - is_domain=True) + project = unit.new_project_ref(domain_id=None, is_domain=True) PROVIDERS.resource_api.create_project(project['id'], project) # Check that a corresponding domain was created PROVIDERS.resource_api.get_domain(project['id']) # Try to delete the enabled project that acts as a domain - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.delete_project, - project['id']) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.delete_project, + project['id'], + ) # Disable the project project['enabled'] = False @@ -455,48 +513,59 @@ class ResourceTests(object): # Successfully delete the project PROVIDERS.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project['id']) + 
self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project['id'], + ) - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain, - project['id']) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain, + project['id'], + ) @unit.skip_if_no_multiple_domains_support def test_create_subproject_acting_as_domain_fails(self): root_project = unit.new_project_ref(is_domain=True) PROVIDERS.resource_api.create_project(root_project['id'], root_project) - sub_project = unit.new_project_ref(is_domain=True, - parent_id=root_project['id']) + sub_project = unit.new_project_ref( + is_domain=True, parent_id=root_project['id'] + ) # Creation of sub projects acting as domains is not allowed yet - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.create_project, - sub_project['id'], sub_project) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.create_project, + sub_project['id'], + sub_project, + ) @unit.skip_if_no_multiple_domains_support def test_create_domain_under_regular_project_hierarchy_fails(self): # Projects acting as domains can't have a regular project as parent projects_hierarchy = self._create_projects_hierarchy() parent = projects_hierarchy[1] - project = unit.new_project_ref(domain_id=parent['id'], - parent_id=parent['id'], - is_domain=True) + project = unit.new_project_ref( + domain_id=parent['id'], parent_id=parent['id'], is_domain=True + ) - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.create_project, - project['id'], project) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.create_project, + project['id'], + project, + ) @unit.skip_if_no_multiple_domains_support @test_utils.wip('waiting for sub projects acting as domains support') def test_create_project_under_domain_hierarchy(self): projects_hierarchy = self._create_projects_hierarchy(is_domain=True) parent = projects_hierarchy[1] 
- project = unit.new_project_ref(domain_id=parent['id'], - parent_id=parent['id'], - is_domain=False) + project = unit.new_project_ref( + domain_id=parent['id'], parent_id=parent['id'], is_domain=False + ) ref = PROVIDERS.resource_api.create_project(project['id'], project) self.assertFalse(ref['is_domain']) @@ -505,7 +574,8 @@ class ResourceTests(object): def test_create_project_without_is_domain_flag(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) del project['is_domain'] ref = PROVIDERS.resource_api.create_project(project['id'], project) # The is_domain flag should be False by default @@ -520,7 +590,8 @@ class ResourceTests(object): def test_create_project_passing_is_domain_flag_false(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, is_domain=False) + domain_id=CONF.identity.default_domain_id, is_domain=False + ) ref = PROVIDERS.resource_api.create_project(project['id'], project) self.assertIs(False, ref['is_domain']) @@ -561,27 +632,34 @@ class ResourceTests(object): # Now try to create a child with the above as its parent, but # specifying a different domain. 
sub_project = unit.new_project_ref( - parent_id=project['id'], domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.create_project, - sub_project['id'], sub_project) + parent_id=project['id'], domain_id=CONF.identity.default_domain_id + ) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.create_project, + sub_project['id'], + sub_project, + ) def test_check_leaf_projects(self): projects_hierarchy = self._create_projects_hierarchy() root_project = projects_hierarchy[0] leaf_project = projects_hierarchy[1] - self.assertFalse(PROVIDERS.resource_api.is_leaf_project( - root_project['id'])) - self.assertTrue(PROVIDERS.resource_api.is_leaf_project( - leaf_project['id'])) + self.assertFalse( + PROVIDERS.resource_api.is_leaf_project(root_project['id']) + ) + self.assertTrue( + PROVIDERS.resource_api.is_leaf_project(leaf_project['id']) + ) # Delete leaf_project PROVIDERS.resource_api.delete_project(leaf_project['id']) # Now, root_project should be leaf - self.assertTrue(PROVIDERS.resource_api.is_leaf_project( - root_project['id'])) + self.assertTrue( + PROVIDERS.resource_api.is_leaf_project(root_project['id']) + ) def test_list_projects_in_subtree(self): projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) @@ -589,8 +667,8 @@ class ResourceTests(object): project2 = projects_hierarchy[1] project3 = projects_hierarchy[2] project4 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=project2['id']) + domain_id=CONF.identity.default_domain_id, parent_id=project2['id'] + ) PROVIDERS.resource_api.create_project(project4['id'], project4) subtree = PROVIDERS.resource_api.list_projects_in_subtree( @@ -634,41 +712,43 @@ class ResourceTests(object): # Create large project hierarchy, of above depiction p1, p2, p4 = self._create_projects_hierarchy(hierarchy_size=3) p5 = self._create_projects_hierarchy( - hierarchy_size=1, parent_project_id=p2['id'])[0] + 
hierarchy_size=1, parent_project_id=p2['id'] + )[0] p3, p6, p8 = self._create_projects_hierarchy( - hierarchy_size=3, parent_project_id=p1['id']) + hierarchy_size=3, parent_project_id=p1['id'] + ) p9, p11 = self._create_projects_hierarchy( - hierarchy_size=2, parent_project_id=p6['id']) + hierarchy_size=2, parent_project_id=p6['id'] + ) p7, p10 = self._create_projects_hierarchy( - hierarchy_size=2, parent_project_id=p3['id']) + hierarchy_size=2, parent_project_id=p3['id'] + ) expected_projects = { - p2['id']: { - p5['id']: None, - p4['id']: None}, + p2['id']: {p5['id']: None, p4['id']: None}, p3['id']: { - p7['id']: { - p10['id']: None}, - p6['id']: { - p9['id']: { - p11['id']: None}, - p8['id']: None}}} + p7['id']: {p10['id']: None}, + p6['id']: {p9['id']: {p11['id']: None}, p8['id']: None}, + }, + } prjs_hierarchy = PROVIDERS.resource_api.get_projects_in_subtree_as_ids( - p1['id']) + p1['id'] + ) self.assertDictEqual(expected_projects, prjs_hierarchy) def test_list_projects_in_subtree_with_circular_reference(self): project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project1 = PROVIDERS.resource_api.create_project( project1['id'], project1 ) project2 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=project1['id']) + domain_id=CONF.identity.default_domain_id, parent_id=project1['id'] + ) PROVIDERS.resource_api.create_project(project2['id'], project2) project1['parent_id'] = project2['id'] # Adds cyclic reference @@ -688,13 +768,17 @@ class ResourceTests(object): self.assertIsNone(subtree) def test_list_projects_in_subtree_invalid_project_id(self): - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.list_projects_in_subtree, - None) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.list_projects_in_subtree, + None, + ) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.list_projects_in_subtree, - 
uuid.uuid4().hex) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.list_projects_in_subtree, + uuid.uuid4().hex, + ) def test_list_project_parents(self): projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) @@ -702,8 +786,8 @@ class ResourceTests(object): project2 = projects_hierarchy[1] project3 = projects_hierarchy[2] project4 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=project2['id']) + domain_id=CONF.identity.default_domain_id, parent_id=project2['id'] + ) PROVIDERS.resource_api.create_project(project4['id'], project4) parents1 = PROVIDERS.resource_api.list_project_parents(project3['id']) @@ -767,44 +851,55 @@ class ResourceTests(object): self.assertFalse(subtree[0]['enabled']) parent['enabled'] = True - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.update_project, - parent['id'], - parent, - cascade=True) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.update_project, + parent['id'], + parent, + cascade=True, + ) def test_update_cascade_only_accepts_enabled(self): # Update cascade does not accept any other attribute but 'enabled' new_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(new_project['id'], new_project) new_project['name'] = 'project1' - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.update_project, - new_project['id'], - new_project, - cascade=True) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.update_project, + new_project['id'], + new_project, + cascade=True, + ) def test_list_project_parents_invalid_project_id(self): - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.list_project_parents, - None) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.list_project_parents, + None, + ) - 
self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.list_project_parents, - uuid.uuid4().hex) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.list_project_parents, + uuid.uuid4().hex, + ) def test_create_project_doesnt_modify_passed_in_dict(self): new_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) original_project = new_project.copy() PROVIDERS.resource_api.create_project(new_project['id'], new_project) self.assertDictEqual(original_project, new_project) def test_update_project_enable(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project['id'], project) project_ref = PROVIDERS.resource_api.get_project(project['id']) self.assertTrue(project_ref['enabled']) @@ -832,13 +927,17 @@ class ResourceTests(object): def test_create_invalid_domain_fails(self): new_group = unit.new_group_ref(domain_id="doesnotexist") - self.assertRaises(exception.DomainNotFound, - PROVIDERS.identity_api.create_group, - new_group) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.identity_api.create_group, + new_group, + ) new_user = unit.new_user_ref(domain_id="doesnotexist") - self.assertRaises(exception.DomainNotFound, - PROVIDERS.identity_api.create_user, - new_user) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.identity_api.create_user, + new_user, + ) @unit.skip_if_no_multiple_domains_support def test_project_crud(self): @@ -855,9 +954,11 @@ class ResourceTests(object): self.assertLessEqual(project.items(), project_ref.items()) PROVIDERS.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project['id'], + ) def 
test_domain_delete_hierarchy(self): domain = unit.new_domain_ref() @@ -865,7 +966,8 @@ class ResourceTests(object): # Creating a root and a leaf project inside the domain projects_hierarchy = self._create_projects_hierarchy( - domain_id=domain['id']) + domain_id=domain['id'] + ) root_project = projects_hierarchy[0] leaf_project = projects_hierarchy[0] @@ -877,19 +979,25 @@ class ResourceTests(object): PROVIDERS.resource_api.delete_domain(domain['id']) # Make sure the domain no longer exists - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain, - domain['id']) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain, + domain['id'], + ) # Make sure the root project no longer exists - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - root_project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + root_project['id'], + ) # Make sure the leaf project no longer exists - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - leaf_project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + leaf_project['id'], + ) def test_delete_projects_from_ids(self): """Test the resource backend call delete_projects_from_ids. @@ -899,9 +1007,11 @@ class ResourceTests(object): called. 
""" project1_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project2_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) projects = (project1_ref, project2_ref) for project in projects: PROVIDERS.resource_api.create_project(project['id'], project) @@ -912,9 +1022,11 @@ class ResourceTests(object): # Ensuring projects no longer exist at backend level for project_id in projects_ids: - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.driver.get_project, - project_id) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.driver.get_project, + project_id, + ) # Passing an empty list is silently ignored PROVIDERS.resource_api.driver.delete_projects_from_ids([]) @@ -927,7 +1039,8 @@ class ResourceTests(object): the backend. """ project_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) # Setting up the ID's list @@ -936,9 +1049,11 @@ class ResourceTests(object): PROVIDERS.resource_api.delete_projects_from_ids(projects_ids) self.assertTrue(mock_log.warning.called) # The existing project was deleted. - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.driver.get_project, - project_ref['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.driver.get_project, + project_ref['id'], + ) # Even if we only have one project, and it does not exist, it returns # no error. 
@@ -961,9 +1076,11 @@ class ResourceTests(object): PROVIDERS.resource_api.delete_project(root_project['id'], cascade=True) for project in projects_hierarchy: - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project['id'], + ) def test_delete_large_project_cascade(self): """Try delete a large project with cascade true. @@ -982,17 +1099,17 @@ class ResourceTests(object): projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=4) p1 = projects_hierarchy[0] # Add the left branch to the hierarchy (p5, p6) - self._create_projects_hierarchy(hierarchy_size=2, - parent_project_id=p1['id']) + self._create_projects_hierarchy( + hierarchy_size=2, parent_project_id=p1['id'] + ) # Add p7 to the hierarchy p3_id = projects_hierarchy[2]['id'] - self._create_projects_hierarchy(hierarchy_size=1, - parent_project_id=p3_id) + self._create_projects_hierarchy( + hierarchy_size=1, parent_project_id=p3_id + ) # Reverse the hierarchy to disable the leaf first prjs_hierarchy = ( - [p1] + PROVIDERS.resource_api.list_projects_in_subtree( - p1['id'] - ) + [p1] + PROVIDERS.resource_api.list_projects_in_subtree(p1['id']) )[::-1] # Disabling all projects before attempting to delete @@ -1002,9 +1119,11 @@ class ResourceTests(object): PROVIDERS.resource_api.delete_project(p1['id'], cascade=True) for project in prjs_hierarchy: - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project['id'], + ) def test_cannot_delete_project_cascade_with_enabled_child(self): # create a hierarchy with 3 levels @@ -1017,10 +1136,12 @@ class ResourceTests(object): PROVIDERS.resource_api.update_project(project2['id'], project2) # Cannot cascade delete root_project, since project1 is enabled - 
self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.delete_project, - root_project['id'], - cascade=True) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.delete_project, + root_project['id'], + cascade=True, + ) # Ensuring no project was deleted, not even project2 PROVIDERS.resource_api.get_project(root_project['id']) @@ -1045,45 +1166,58 @@ class ResourceTests(object): # update the parent_id is not allowed leaf_project['parent_id'] = root_project1['id'] - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.update_project, - leaf_project['id'], - leaf_project) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.update_project, + leaf_project['id'], + leaf_project, + ) # delete root_project1 PROVIDERS.resource_api.delete_project(root_project1['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - root_project1['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + root_project1['id'], + ) # delete root_project2 is not allowed since it is not a leaf project - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.delete_project, - root_project2['id']) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.delete_project, + root_project2['id'], + ) def test_create_project_with_invalid_parent(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, parent_id='fake') - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.create_project, - project['id'], - project) + domain_id=CONF.identity.default_domain_id, parent_id='fake' + ) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.create_project, + project['id'], + project, + ) @unit.skip_if_no_multiple_domains_support def test_create_leaf_project_with_different_domain(self): root_project = unit.new_project_ref( - 
domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(root_project['id'], root_project) domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) - leaf_project = unit.new_project_ref(domain_id=domain['id'], - parent_id=root_project['id']) + leaf_project = unit.new_project_ref( + domain_id=domain['id'], parent_id=root_project['id'] + ) - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.create_project, - leaf_project['id'], - leaf_project) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.create_project, + leaf_project['id'], + leaf_project, + ) def test_delete_hierarchical_leaf_project(self): projects_hierarchy = self._create_projects_hierarchy() @@ -1091,22 +1225,28 @@ class ResourceTests(object): leaf_project = projects_hierarchy[1] PROVIDERS.resource_api.delete_project(leaf_project['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - leaf_project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + leaf_project['id'], + ) PROVIDERS.resource_api.delete_project(root_project['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - root_project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + root_project['id'], + ) def test_delete_hierarchical_not_leaf_project(self): projects_hierarchy = self._create_projects_hierarchy() root_project = projects_hierarchy[0] - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.delete_project, - root_project['id']) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.delete_project, + root_project['id'], + ) def test_update_project_parent(self): projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) @@ -1119,26 +1259,31 @@ class ResourceTests(object): 
# try to update project3 parent to parent1 project3['parent_id'] = project1['id'] - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.update_project, - project3['id'], - project3) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.update_project, + project3['id'], + project3, + ) def test_create_project_under_disabled_one(self): project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, enabled=False) + domain_id=CONF.identity.default_domain_id, enabled=False + ) PROVIDERS.resource_api.create_project(project1['id'], project1) project2 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=project1['id']) + domain_id=CONF.identity.default_domain_id, parent_id=project1['id'] + ) # It's not possible to create a project under a disabled one in the # hierarchy - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.create_project, - project2['id'], - project2) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.create_project, + project2['id'], + project2, + ) def test_disable_hierarchical_leaf_project(self): projects_hierarchy = self._create_projects_hierarchy() @@ -1155,10 +1300,12 @@ class ResourceTests(object): root_project = projects_hierarchy[0] root_project['enabled'] = False - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.update_project, - root_project['id'], - root_project) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.update_project, + root_project['id'], + root_project, + ) def test_enable_project_with_disabled_parent(self): projects_hierarchy = self._create_projects_hierarchy() @@ -1174,10 +1321,12 @@ class ResourceTests(object): # Try to enable the leaf project, it's not possible since it has # a disabled parent leaf_project['enabled'] = True - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.update_project, - leaf_project['id'], - 
leaf_project) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.update_project, + leaf_project['id'], + leaf_project, + ) def _get_hierarchy_depth(self, project_id): return len(PROVIDERS.resource_api.list_project_parents(project_id)) + 1 @@ -1187,7 +1336,8 @@ class ResourceTests(object): # in the config option plus one (to allow for the additional project # acting as a domain after an upgrade) projects_hierarchy = self._create_projects_hierarchy( - CONF.max_project_tree_depth) + CONF.max_project_tree_depth + ) leaf_project = projects_hierarchy[CONF.max_project_tree_depth - 1] depth = self._get_hierarchy_depth(leaf_project['id']) @@ -1196,16 +1346,20 @@ class ResourceTests(object): # Creating another project in the hierarchy shouldn't be allowed project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id, - parent_id=leaf_project['id']) - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.create_project, - project['id'], - project) + parent_id=leaf_project['id'], + ) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.create_project, + project['id'], + project, + ) def test_project_update_missing_attrs_with_a_value(self): # Creating a project with no description attribute. project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) del project['description'] project = PROVIDERS.resource_api.create_project(project['id'], project) @@ -1219,7 +1373,8 @@ class ResourceTests(object): def test_project_update_missing_attrs_with_a_falsey_value(self): # Creating a project with no description attribute. 
project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) del project['description'] project = PROVIDERS.resource_api.create_project(project['id'], project) @@ -1244,9 +1399,11 @@ class ResourceTests(object): self.assertDictEqual(domain, domain_ref) # Ensure an 'enabled' domain cannot be deleted - self.assertRaises(exception.ForbiddenNotSecurity, - PROVIDERS.resource_api.delete_domain, - domain_id=domain['id']) + self.assertRaises( + exception.ForbiddenNotSecurity, + PROVIDERS.resource_api.delete_domain, + domain_id=domain['id'], + ) # Disable the domain domain['enabled'] = False @@ -1256,9 +1413,11 @@ class ResourceTests(object): PROVIDERS.resource_api.delete_domain(domain['id']) # Make sure the domain no longer exists - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain, - domain['id']) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain, + domain['id'], + ) @unit.skip_if_no_multiple_domains_support def test_delete_domain_call_db_time(self): @@ -1269,8 +1428,9 @@ class ResourceTests(object): PROVIDERS.resource_api.update_domain(domain['id'], domain) domain_ref = PROVIDERS.resource_api.get_project(domain['id']) - with mock.patch.object(resource_sql.Resource, - "get_project") as mock_get_project: + with mock.patch.object( + resource_sql.Resource, "get_project" + ) as mock_get_project: mock_get_project.return_value = domain_ref # Delete the domain @@ -1296,16 +1456,19 @@ class ResourceTests(object): # We can get each domain by name lower_case_domain_ref = PROVIDERS.resource_api.get_domain_by_name( - domain_name) + domain_name + ) self.assertDictEqual(lower_case_domain, lower_case_domain_ref) upper_case_domain_ref = PROVIDERS.resource_api.get_domain_by_name( - domain_name.upper()) + domain_name.upper() + ) self.assertDictEqual(upper_case_domain, upper_case_domain_ref) def test_project_attribute_update(self): project = unit.new_project_ref( 
- domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project['id'], project) # pick a key known to be non-existent @@ -1313,14 +1476,16 @@ class ResourceTests(object): def assert_key_equals(value): project_ref = PROVIDERS.resource_api.update_project( - project['id'], project) + project['id'], project + ) self.assertEqual(value, project_ref[key]) project_ref = PROVIDERS.resource_api.get_project(project['id']) self.assertEqual(value, project_ref[key]) def assert_get_key_is(value): project_ref = PROVIDERS.resource_api.update_project( - project['id'], project) + project['id'], project + ) self.assertIs(project_ref.get(key), value) project_ref = PROVIDERS.resource_api.get_project(project['id']) self.assertIs(project_ref.get(key), value) @@ -1360,9 +1525,11 @@ class ResourceTests(object): domain_ref = PROVIDERS.resource_api.get_domain_by_name(domain_name) domain_ref['name'] = uuid.uuid4().hex PROVIDERS.resource_api.update_domain(domain_id, domain_ref) - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain_by_name, - domain_name) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain_by_name, + domain_name, + ) @unit.skip_if_cache_disabled('resource') def test_cache_layer_domain_crud(self): @@ -1383,7 +1550,8 @@ class ResourceTests(object): # Verify get_domain still returns the domain self.assertLessEqual( domain_ref.items(), - PROVIDERS.resource_api.get_domain(domain_id).items()) + PROVIDERS.resource_api.get_domain(domain_id).items(), + ) # Invalidate cache PROVIDERS.resource_api.get_domain.invalidate( PROVIDERS.resource_api, domain_id @@ -1391,13 +1559,15 @@ class ResourceTests(object): # Verify get_domain returns the updated domain self.assertLessEqual( updated_domain_ref.items(), - PROVIDERS.resource_api.get_domain(domain_id).items()) + PROVIDERS.resource_api.get_domain(domain_id).items(), + ) # Update the domain back to original ref, 
using the assignment api # manager PROVIDERS.resource_api.update_domain(domain_id, domain_ref) self.assertLessEqual( domain_ref.items(), - PROVIDERS.resource_api.get_domain(domain_id).items()) + PROVIDERS.resource_api.get_domain(domain_id).items(), + ) # Make sure domain is 'disabled', bypass resource api manager project_domain_ref_disabled = project_domain_ref.copy() project_domain_ref_disabled['enabled'] = False @@ -1412,14 +1582,18 @@ class ResourceTests(object): # Verify get_domain still returns the domain self.assertLessEqual( domain_ref.items(), - PROVIDERS.resource_api.get_domain(domain_id).items()) + PROVIDERS.resource_api.get_domain(domain_id).items(), + ) # Invalidate cache PROVIDERS.resource_api.get_domain.invalidate( PROVIDERS.resource_api, domain_id ) # Verify get_domain now raises DomainNotFound - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain, domain_id) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain, + domain_id, + ) # Recreate Domain PROVIDERS.resource_api.create_domain(domain_id, domain) PROVIDERS.resource_api.get_domain(domain_id) @@ -1432,9 +1606,11 @@ class ResourceTests(object): # Delete domain PROVIDERS.resource_api.delete_domain(domain_id) # verify DomainNotFound raised - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain, - domain_id) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain, + domain_id, + ) @unit.skip_if_cache_disabled('resource') @unit.skip_if_no_multiple_domains_support @@ -1449,10 +1625,12 @@ class ResourceTests(object): PROVIDERS.resource_api.get_project_by_name(project_name, domain['id']) project['name'] = uuid.uuid4().hex PROVIDERS.resource_api.update_project(project_id, project) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project_by_name, - project_name, - domain['id']) + self.assertRaises( + exception.ProjectNotFound, + 
PROVIDERS.resource_api.get_project_by_name, + project_name, + domain['id'], + ) @unit.skip_if_cache_disabled('resource') @unit.skip_if_no_multiple_domains_support @@ -1473,7 +1651,8 @@ class ResourceTests(object): # Verify get_project still returns the original project_ref self.assertLessEqual( project.items(), - PROVIDERS.resource_api.get_project(project_id).items()) + PROVIDERS.resource_api.get_project(project_id).items(), + ) # Invalidate cache PROVIDERS.resource_api.get_project.invalidate( PROVIDERS.resource_api, project_id @@ -1481,36 +1660,43 @@ class ResourceTests(object): # Verify get_project now returns the new project self.assertLessEqual( updated_project.items(), - PROVIDERS.resource_api.get_project(project_id).items()) + PROVIDERS.resource_api.get_project(project_id).items(), + ) # Update project using the resource_api manager back to original PROVIDERS.resource_api.update_project(project['id'], project) # Verify get_project returns the original project_ref self.assertLessEqual( project.items(), - PROVIDERS.resource_api.get_project(project_id).items()) + PROVIDERS.resource_api.get_project(project_id).items(), + ) # Delete project bypassing resource PROVIDERS.resource_api.driver.delete_project(project_id) # Verify get_project still returns the project_ref self.assertLessEqual( project.items(), - PROVIDERS.resource_api.get_project(project_id).items()) + PROVIDERS.resource_api.get_project(project_id).items(), + ) # Invalidate cache PROVIDERS.resource_api.get_project.invalidate( PROVIDERS.resource_api, project_id ) # Verify ProjectNotFound now raised - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project_id) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project_id, + ) # recreate project PROVIDERS.resource_api.create_project(project_id, project) PROVIDERS.resource_api.get_project(project_id) # delete project PROVIDERS.resource_api.delete_project(project_id) # Verify 
ProjectNotFound is raised - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project_id) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project_id, + ) @unit.skip_if_no_multiple_domains_support def test_get_default_domain_by_name(self): @@ -1524,19 +1710,23 @@ class ResourceTests(object): def test_get_not_default_domain_by_name(self): domain_name = 'foo' - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain_by_name, - domain_name) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain_by_name, + domain_name, + ) def test_project_update_and_project_get_return_same_response(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project['id'], project) updated_project = {'enabled': False} updated_project_ref = PROVIDERS.resource_api.update_project( - project['id'], updated_project) + project['id'], updated_project + ) # SQL backend adds 'extra' field updated_project_ref.pop('extra', None) @@ -1549,9 +1739,11 @@ class ResourceTests(object): def test_delete_project_clears_default_project_id(self): self.config_fixture.config(group='cache', enabled=False) project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id, - project_id=project['id']) + domain_id=CONF.identity.default_domain_id + ) + user = unit.new_user_ref( + domain_id=CONF.identity.default_domain_id, project_id=project['id'] + ) PROVIDERS.resource_api.create_project(project['id'], project) user = PROVIDERS.identity_api.create_user(user) user = PROVIDERS.identity_api.get_user(user['id']) @@ -1565,9 +1757,11 @@ class ResourceTests(object): def test_delete_project_with_roles_clears_default_project_id(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) 
- user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id, - project_id=project['id']) + domain_id=CONF.identity.default_domain_id + ) + user = unit.new_user_ref( + domain_id=CONF.identity.default_domain_id, project_id=project['id'] + ) PROVIDERS.resource_api.create_project(project['id'], project) user = PROVIDERS.identity_api.create_user(user) role = unit.new_role_ref() @@ -1589,7 +1783,8 @@ class ResourceTests(object): """ tags = [uuid.uuid4().hex for i in range(num_of_tags)] ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, tags=tags) + domain_id=CONF.identity.default_domain_id, tags=tags + ) project = PROVIDERS.resource_api.create_project(ref['id'], ref) return project, tags @@ -1614,9 +1809,11 @@ class ResourceTests(object): self.assertEqual(tags[0], tag_ref[0]) def test_list_project_tags_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.list_project_tags, - uuid.uuid4().hex) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.list_project_tags, + uuid.uuid4().hex, + ) def test_get_project_tag(self): project, tags = self._create_project_and_tags() @@ -1658,30 +1855,36 @@ class ResourceTests(object): def test_update_project_tags_returns_not_found(self): _, tags = self._create_project_and_tags(num_of_tags=2) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.update_project_tags, - uuid.uuid4().hex, - tags) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.update_project_tags, + uuid.uuid4().hex, + tags, + ) def test_delete_tag_from_project(self): project, tags = self._create_project_and_tags(num_of_tags=2) tag_to_delete = tags[-1] PROVIDERS.resource_api.delete_project_tag(project['id'], tag_to_delete) project_tag_ref = PROVIDERS.resource_api.list_project_tags( - project['id']) + project['id'] + ) self.assertEqual(len(project_tag_ref), 1) self.assertEqual(project_tag_ref[0], tags[0]) def 
test_delete_project_tag_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.delete_project_tag, - uuid.uuid4().hex, - uuid.uuid4().hex) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.delete_project_tag, + uuid.uuid4().hex, + uuid.uuid4().hex, + ) def test_delete_project_tags(self): project, tags = self._create_project_and_tags(num_of_tags=5) project_tag_ref = PROVIDERS.resource_api.list_project_tags( - project['id']) + project['id'] + ) self.assertEqual(len(project_tag_ref), 5) PROVIDERS.resource_api.update_project_tags(project['id'], []) @@ -1692,175 +1895,189 @@ class ResourceTests(object): def test_create_project_immutable(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project['options'][ro_opt.IMMUTABLE_OPT.option_name] = True p_created = PROVIDERS.resource_api.create_project( - project['id'], project) + project['id'], project + ) project_via_manager = PROVIDERS.resource_api.get_project(project['id']) self.assertTrue('options' in p_created) self.assertTrue('options' in project_via_manager) self.assertTrue( - project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) - self.assertTrue( - p_created['options'][ro_opt.IMMUTABLE_OPT.option_name]) + project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) + self.assertTrue(p_created['options'][ro_opt.IMMUTABLE_OPT.option_name]) def test_cannot_update_immutable_project(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project['options'][ro_opt.IMMUTABLE_OPT.option_name] = True PROVIDERS.resource_api.create_project(project['id'], project) update_project = {'name': uuid.uuid4().hex} - self.assertRaises(exception.ResourceUpdateForbidden, - PROVIDERS.resource_api.update_project, - project['id'], - update_project) + self.assertRaises( + 
exception.ResourceUpdateForbidden, + PROVIDERS.resource_api.update_project, + project['id'], + update_project, + ) def test_cannot_update_immutable_project_while_unsetting_immutable(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project['options'][ro_opt.IMMUTABLE_OPT.option_name] = True PROVIDERS.resource_api.create_project(project['id'], project) update_project = { 'name': uuid.uuid4().hex, - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - }} - self.assertRaises(exception.ResourceUpdateForbidden, - PROVIDERS.resource_api.update_project, - project['id'], - update_project) + 'options': {ro_opt.IMMUTABLE_OPT.option_name: True}, + } + self.assertRaises( + exception.ResourceUpdateForbidden, + PROVIDERS.resource_api.update_project, + project['id'], + update_project, + ) def test_cannot_delete_immutable_project(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project['options'][ro_opt.IMMUTABLE_OPT.option_name] = True PROVIDERS.resource_api.create_project(project['id'], project) - self.assertRaises(exception.ResourceDeleteForbidden, - PROVIDERS.resource_api.delete_project, - project['id']) + self.assertRaises( + exception.ResourceDeleteForbidden, + PROVIDERS.resource_api.delete_project, + project['id'], + ) def test_update_project_set_immutable(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project['id'], project) - update_project = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - }} + update_project = {'options': {ro_opt.IMMUTABLE_OPT.option_name: True}} project_via_manager = PROVIDERS.resource_api.get_project(project['id']) self.assertTrue('options' in project_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options']) + 
ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options'] + ) p_update = PROVIDERS.resource_api.update_project( - project['id'], update_project) + project['id'], update_project + ) project_via_manager = PROVIDERS.resource_api.get_project(project['id']) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in p_update['options']) + ro_opt.IMMUTABLE_OPT.option_name in p_update['options'] + ) + self.assertTrue(p_update['options'][ro_opt.IMMUTABLE_OPT.option_name]) self.assertTrue( - p_update['options'][ro_opt.IMMUTABLE_OPT.option_name]) + ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options'] + ) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options']) - self.assertTrue( - project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) def test_update_project_set_immutable_with_additional_updates(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project['id'], project) update_project = { 'name': uuid.uuid4().hex, - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - }} + 'options': {ro_opt.IMMUTABLE_OPT.option_name: True}, + } project_via_manager = PROVIDERS.resource_api.get_project(project['id']) self.assertTrue('options' in project_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options'] + ) p_update = PROVIDERS.resource_api.update_project( - project['id'], update_project) + project['id'], update_project + ) project_via_manager = PROVIDERS.resource_api.get_project(project['id']) self.assertEqual(p_update['name'], update_project['name']) self.assertEqual(project_via_manager['name'], update_project['name']) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in p_update['options']) + ro_opt.IMMUTABLE_OPT.option_name 
in p_update['options'] + ) + self.assertTrue(p_update['options'][ro_opt.IMMUTABLE_OPT.option_name]) self.assertTrue( - p_update['options'][ro_opt.IMMUTABLE_OPT.option_name]) + ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options'] + ) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options']) - self.assertTrue( - project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) def test_update_project_unset_immutable(self): project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project['options'][ro_opt.IMMUTABLE_OPT.option_name] = True PROVIDERS.resource_api.create_project(project['id'], project) project_via_manager = PROVIDERS.resource_api.get_project(project['id']) self.assertTrue('options' in project_via_manager) self.assertTrue( - project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) - update_project = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: False - }} + update_project = {'options': {ro_opt.IMMUTABLE_OPT.option_name: False}} PROVIDERS.resource_api.update_project(project['id'], update_project) project_via_manager = PROVIDERS.resource_api.get_project(project['id']) self.assertTrue('options' in project_via_manager) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options'] + ) self.assertFalse( - project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + project_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) update_project = {'name': uuid.uuid4().hex} p_updated = PROVIDERS.resource_api.update_project( - project['id'], update_project) + project['id'], update_project + ) self.assertEqual(p_updated['name'], update_project['name']) - update_project = { - 'options': { - 
ro_opt.IMMUTABLE_OPT.option_name: None - }} + update_project = {'options': {ro_opt.IMMUTABLE_OPT.option_name: None}} p_updated = PROVIDERS.resource_api.update_project( - project['id'], update_project) + project['id'], update_project + ) project_via_manager = PROVIDERS.resource_api.get_project(project['id']) self.assertTrue('options' in p_updated) self.assertTrue('options' in project_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in p_updated['options']) + ro_opt.IMMUTABLE_OPT.option_name in p_updated['options'] + ) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in project_via_manager['options'] + ) def test_cannot_delete_project_tags_immutable_project(self): project, tags = self._create_project_and_tags(num_of_tags=2) - update_project = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - } - } + update_project = {'options': {ro_opt.IMMUTABLE_OPT.option_name: True}} PROVIDERS.resource_api.update_project(project['id'], update_project) - self.assertRaises(exception.ResourceUpdateForbidden, - PROVIDERS.resource_api.delete_project_tag, - project['id'], - tags[0]) + self.assertRaises( + exception.ResourceUpdateForbidden, + PROVIDERS.resource_api.delete_project_tag, + project['id'], + tags[0], + ) def test_cannot_update_project_tags_immutable_project(self): # Update and Add tag use the same API project, tags = self._create_project_and_tags(num_of_tags=2) - update_project = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - } - } + update_project = {'options': {ro_opt.IMMUTABLE_OPT.option_name: True}} PROVIDERS.resource_api.update_project(project['id'], update_project) tags.append(uuid.uuid4().hex) - self.assertRaises(exception.ResourceUpdateForbidden, - PROVIDERS.resource_api.update_project_tags, - project['id'], - tags) + self.assertRaises( + exception.ResourceUpdateForbidden, + PROVIDERS.resource_api.update_project_tags, + project['id'], + tags, + ) 
@unit.skip_if_no_multiple_domains_support def test_create_domain_immutable(self): @@ -1870,7 +2087,7 @@ class ResourceTests(object): 'name': uuid.uuid4().hex, 'id': domain_id, 'is_domain': True, - 'options': {'immutable': True} + 'options': {'immutable': True}, } PROVIDERS.resource_api.create_domain(domain_id, domain) @@ -1886,15 +2103,17 @@ class ResourceTests(object): 'name': uuid.uuid4().hex, 'id': domain_id, 'is_domain': True, - 'options': {'immutable': True} + 'options': {'immutable': True}, } PROVIDERS.resource_api.create_domain(domain_id, domain) update_domain = {'name': uuid.uuid4().hex} - self.assertRaises(exception.ResourceUpdateForbidden, - PROVIDERS.resource_api.update_domain, - domain_id, - update_domain) + self.assertRaises( + exception.ResourceUpdateForbidden, + PROVIDERS.resource_api.update_domain, + domain_id, + update_domain, + ) @unit.skip_if_no_multiple_domains_support def test_cannot_delete_immutable_domain(self): @@ -1904,13 +2123,15 @@ class ResourceTests(object): 'name': uuid.uuid4().hex, 'id': domain_id, 'is_domain': True, - 'options': {'immutable': True} + 'options': {'immutable': True}, } PROVIDERS.resource_api.create_domain(domain_id, domain) - self.assertRaises(exception.ResourceDeleteForbidden, - PROVIDERS.resource_api.delete_domain, - domain_id,) + self.assertRaises( + exception.ResourceDeleteForbidden, + PROVIDERS.resource_api.delete_domain, + domain_id, + ) @unit.skip_if_no_multiple_domains_support def test_cannot_delete_disabled_domain_with_immutable_project(self): @@ -1930,9 +2151,11 @@ class ResourceTests(object): PROVIDERS.resource_api.update_domain(domain_id, {'enabled': False}) # attempt to delete the domain, should error when the immutable # project is reached - self.assertRaises(exception.ResourceDeleteForbidden, - PROVIDERS.resource_api.delete_domain, - domain_id) + self.assertRaises( + exception.ResourceDeleteForbidden, + PROVIDERS.resource_api.delete_domain, + domain_id, + ) @unit.skip_if_no_multiple_domains_support def 
test_update_domain_set_immutable(self): @@ -1949,23 +2172,24 @@ class ResourceTests(object): domain_via_manager = PROVIDERS.resource_api.get_domain(domain_id) self.assertTrue('options' in domain_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options'] + ) - domain_update = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: True - }} + domain_update = {'options': {ro_opt.IMMUTABLE_OPT.option_name: True}} d_update = PROVIDERS.resource_api.update_domain( - domain_id, domain_update) + domain_id, domain_update + ) domain_via_manager = PROVIDERS.resource_api.get_domain(domain_id) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in d_update['options']) + ro_opt.IMMUTABLE_OPT.option_name in d_update['options'] + ) + self.assertTrue(d_update['options'][ro_opt.IMMUTABLE_OPT.option_name]) self.assertTrue( - d_update['options'][ro_opt.IMMUTABLE_OPT.option_name]) + ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options'] + ) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options']) - self.assertTrue( - domain_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + domain_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) def test_update_domain_unset_immutable(self): # domains are projects, this should be the same as the project version @@ -1981,44 +2205,48 @@ class ResourceTests(object): domain_via_manager = PROVIDERS.resource_api.get_domain(domain_id) self.assertTrue('options' in domain_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options'] + ) - update_domain = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: False - }} + update_domain = {'options': {ro_opt.IMMUTABLE_OPT.option_name: False}} d_updated = PROVIDERS.resource_api.update_domain( - domain_id, update_domain) + domain_id, update_domain + 
) domain_via_manager = PROVIDERS.resource_api.get_domain(domain_id) self.assertTrue('options' in domain_via_manager) self.assertTrue('options' in d_updated) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options'] + ) self.assertTrue( - ro_opt.IMMUTABLE_OPT.option_name in d_updated['options']) + ro_opt.IMMUTABLE_OPT.option_name in d_updated['options'] + ) self.assertFalse( - d_updated['options'][ro_opt.IMMUTABLE_OPT.option_name]) + d_updated['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) self.assertFalse( - domain_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name]) + domain_via_manager['options'][ro_opt.IMMUTABLE_OPT.option_name] + ) update_domain = {'name': uuid.uuid4().hex} d_updated = PROVIDERS.resource_api.update_domain( - domain_id, update_domain) + domain_id, update_domain + ) self.assertEqual(d_updated['name'], update_domain['name']) - update_domain = { - 'options': { - ro_opt.IMMUTABLE_OPT.option_name: None - }} + update_domain = {'options': {ro_opt.IMMUTABLE_OPT.option_name: None}} d_updated = PROVIDERS.resource_api.update_domain( - domain_id, update_domain) + domain_id, update_domain + ) domain_via_manager = PROVIDERS.resource_api.get_domain(domain_id) self.assertTrue('options' in d_updated) self.assertTrue('options' in domain_via_manager) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in d_updated['options']) + ro_opt.IMMUTABLE_OPT.option_name in d_updated['options'] + ) self.assertFalse( - ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options']) + ro_opt.IMMUTABLE_OPT.option_name in domain_via_manager['options'] + ) class ResourceDriverTests(object): @@ -2085,8 +2313,9 @@ class ResourceDriverTests(object): 'id': project_id, 'domain_id': domain_id, } - self.assertRaises(exception.Conflict, self.driver.create_project, - project_id, project) + self.assertRaises( + exception.Conflict, self.driver.create_project, project_id, project + ) 
def test_create_project_same_id_conflict(self): project_id = uuid.uuid4().hex @@ -2103,5 +2332,6 @@ class ResourceDriverTests(object): 'id': project_id, 'domain_id': default_fixtures.ROOT_DOMAIN['id'], } - self.assertRaises(exception.Conflict, self.driver.create_project, - project_id, project) + self.assertRaises( + exception.Conflict, self.driver.create_project, project_id, project + ) diff --git a/keystone/tests/unit/resource/test_core.py b/keystone/tests/unit/resource/test_core.py index 0df43f18f7..d2ff52e05f 100644 --- a/keystone/tests/unit/resource/test_core.py +++ b/keystone/tests/unit/resource/test_core.py @@ -36,7 +36,8 @@ class TestResourceManagerNoFixtures(unit.SQLDriverOverrides, unit.TestCase): self.useFixture(database.Database()) self.load_backends() PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) def test_update_project_name_conflict(self): name = uuid.uuid4().hex @@ -47,20 +48,25 @@ class TestResourceManagerNoFixtures(unit.SQLDriverOverrides, unit.TestCase): 'description': description, } domain = PROVIDERS.resource_api.create_domain( - CONF.identity.default_domain_id, domain_attrs) - project1 = unit.new_project_ref(domain_id=domain['id'], - name=uuid.uuid4().hex) + CONF.identity.default_domain_id, domain_attrs + ) + project1 = unit.new_project_ref( + domain_id=domain['id'], name=uuid.uuid4().hex + ) PROVIDERS.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id'], - name=uuid.uuid4().hex) + project2 = unit.new_project_ref( + domain_id=domain['id'], name=uuid.uuid4().hex + ) project = PROVIDERS.resource_api.create_project( project2['id'], project2 ) - self.assertRaises(exception.Conflict, - PROVIDERS.resource_api.update_project, - project['id'], {'name': project1['name'], - 'id': project['id']}) + self.assertRaises( + exception.Conflict, + 
PROVIDERS.resource_api.update_project, + project['id'], + {'name': project1['name'], 'id': project['id']}, + ) class DomainConfigDriverTests(object): @@ -70,27 +76,38 @@ class DomainConfigDriverTests(object): group = uuid.uuid4().hex option = uuid.uuid4().hex value = uuid.uuid4().hex - config = {'group': group, 'option': option, 'value': value, - 'sensitive': sensitive} + config = { + 'group': group, + 'option': option, + 'value': value, + 'sensitive': sensitive, + } self.driver.create_config_options(domain, [config]) - res = self.driver.get_config_option( - domain, group, option, sensitive) + res = self.driver.get_config_option(domain, group, option, sensitive) config.pop('sensitive') self.assertEqual(config, res) value = uuid.uuid4().hex - config = {'group': group, 'option': option, 'value': value, - 'sensitive': sensitive} + config = { + 'group': group, + 'option': option, + 'value': value, + 'sensitive': sensitive, + } self.driver.update_config_options(domain, [config]) - res = self.driver.get_config_option( - domain, group, option, sensitive) + res = self.driver.get_config_option(domain, group, option, sensitive) config.pop('sensitive') self.assertEqual(config, res) self.driver.delete_config_options(domain, group, option) - self.assertRaises(exception.DomainConfigNotFound, - self.driver.get_config_option, - domain, group, option, sensitive) + self.assertRaises( + exception.DomainConfigNotFound, + self.driver.get_config_option, + domain, + group, + option, + sensitive, + ) # ...and silent if we try to delete it again self.driver.delete_config_options(domain, group, option) @@ -102,38 +119,52 @@ class DomainConfigDriverTests(object): def _list_domain_config(self, sensitive): """Test listing by combination of domain, group & option.""" - config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex, 'sensitive': sensitive} + config1 = { + 'group': uuid.uuid4().hex, + 'option': uuid.uuid4().hex, + 'value': uuid.uuid4().hex, + 
'sensitive': sensitive, + } # Put config2 in the same group as config1 - config2 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex, 'sensitive': sensitive} - config3 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': 100, 'sensitive': sensitive} + config2 = { + 'group': config1['group'], + 'option': uuid.uuid4().hex, + 'value': uuid.uuid4().hex, + 'sensitive': sensitive, + } + config3 = { + 'group': uuid.uuid4().hex, + 'option': uuid.uuid4().hex, + 'value': 100, + 'sensitive': sensitive, + } domain = uuid.uuid4().hex - self.driver.create_config_options( - domain, [config1, config2, config3]) + self.driver.create_config_options(domain, [config1, config2, config3]) for config in [config1, config2, config3]: config.pop('sensitive') # Try listing all items from a domain - res = self.driver.list_config_options( - domain, sensitive=sensitive) + res = self.driver.list_config_options(domain, sensitive=sensitive) self.assertThat(res, matchers.HasLength(3)) for res_entry in res: self.assertIn(res_entry, [config1, config2, config3]) # Try listing by domain and group res = self.driver.list_config_options( - domain, group=config1['group'], sensitive=sensitive) + domain, group=config1['group'], sensitive=sensitive + ) self.assertThat(res, matchers.HasLength(2)) for res_entry in res: self.assertIn(res_entry, [config1, config2]) # Try listing by domain, group and option res = self.driver.list_config_options( - domain, group=config2['group'], - option=config2['option'], sensitive=sensitive) + domain, + group=config2['group'], + option=config2['option'], + sensitive=sensitive, + ) self.assertThat(res, matchers.HasLength(1)) self.assertEqual(config2, res[0]) @@ -145,25 +176,43 @@ class DomainConfigDriverTests(object): def _delete_domain_configs(self, sensitive): """Test deleting by combination of domain, group & option.""" - config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex, 'sensitive': 
sensitive} + config1 = { + 'group': uuid.uuid4().hex, + 'option': uuid.uuid4().hex, + 'value': uuid.uuid4().hex, + 'sensitive': sensitive, + } # Put config2 and config3 in the same group as config1 - config2 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex, 'sensitive': sensitive} - config3 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex, 'sensitive': sensitive} - config4 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex, 'sensitive': sensitive} + config2 = { + 'group': config1['group'], + 'option': uuid.uuid4().hex, + 'value': uuid.uuid4().hex, + 'sensitive': sensitive, + } + config3 = { + 'group': config1['group'], + 'option': uuid.uuid4().hex, + 'value': uuid.uuid4().hex, + 'sensitive': sensitive, + } + config4 = { + 'group': uuid.uuid4().hex, + 'option': uuid.uuid4().hex, + 'value': uuid.uuid4().hex, + 'sensitive': sensitive, + } domain = uuid.uuid4().hex self.driver.create_config_options( - domain, [config1, config2, config3, config4]) + domain, [config1, config2, config3, config4] + ) for config in [config1, config2, config3, config4]: config.pop('sensitive') # Try deleting by domain, group and option res = self.driver.delete_config_options( - domain, group=config2['group'], option=config2['option']) + domain, group=config2['group'], option=config2['option'] + ) res = self.driver.list_config_options(domain, sensitive=sensitive) self.assertThat(res, matchers.HasLength(3)) for res_entry in res: @@ -189,15 +238,20 @@ class DomainConfigDriverTests(object): def _create_domain_config_twice(self, sensitive): """Test create the same option twice just overwrites.""" - config = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex, 'sensitive': sensitive} + config = { + 'group': uuid.uuid4().hex, + 'option': uuid.uuid4().hex, + 'value': uuid.uuid4().hex, + 'sensitive': sensitive, + } domain = uuid.uuid4().hex 
self.driver.create_config_options(domain, [config]) config['value'] = uuid.uuid4().hex self.driver.create_config_options(domain, [config]) res = self.driver.get_config_option( - domain, config['group'], config['option'], sensitive) + domain, config['group'], config['option'], sensitive + ) config.pop('sensitive') self.assertEqual(config, res) @@ -224,9 +278,13 @@ class DomainConfigTests(object): del self.domain def test_create_domain_config_including_sensitive_option(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + } + } PROVIDERS.domain_config_api.create_config(self.domain['id'], config) # password is sensitive, so check that the whitelisted portion and @@ -236,19 +294,25 @@ class DomainConfigTests(object): config_whitelisted['ldap'].pop('password') self.assertEqual(config_whitelisted, res) res = PROVIDERS.domain_config_api.driver.get_config_option( - self.domain['id'], 'ldap', 'password', sensitive=True) + self.domain['id'], 'ldap', 'password', sensitive=True + ) self.assertEqual(config['ldap']['password'], res['value']) # Finally, use the non-public API to get back the whole config res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) + self.domain['id'] + ) self.assertEqual(config, res) def test_get_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } PROVIDERS.domain_config_api.create_config(self.domain['id'], config) res = PROVIDERS.domain_config_api.get_config( @@ -258,7 +322,8 @@ class DomainConfigTests(object): 
config_partial.pop('ldap') self.assertEqual(config_partial, res) res = PROVIDERS.domain_config_api.get_config( - self.domain['id'], group='ldap', option='user_tree_dn') + self.domain['id'], group='ldap', option='user_tree_dn' + ) self.assertEqual({'user_tree_dn': config['ldap']['user_tree_dn']}, res) # ...but we should fail to get a sensitive option self.assertRaises( @@ -266,18 +331,23 @@ class DomainConfigTests(object): PROVIDERS.domain_config_api.get_config, self.domain['id'], group='ldap', - option='password' + option='password', ) def test_delete_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } PROVIDERS.domain_config_api.create_config(self.domain['id'], config) PROVIDERS.domain_config_api.delete_config( - self.domain['id'], group='identity') + self.domain['id'], group='identity' + ) config_partial = copy.deepcopy(config) config_partial.pop('identity') config_partial['ldap'].pop('password') @@ -285,7 +355,8 @@ class DomainConfigTests(object): self.assertEqual(config_partial, res) PROVIDERS.domain_config_api.delete_config( - self.domain['id'], group='ldap', option='url') + self.domain['id'], group='ldap', option='url' + ) config_partial = copy.deepcopy(config_partial) config_partial['ldap'].pop('url') res = PROVIDERS.domain_config_api.get_config(self.domain['id']) @@ -295,7 +366,7 @@ class DomainConfigTests(object): self.assertRaises( exception.DomainConfigNotFound, PROVIDERS.domain_config_api.get_config, - self.domain['id'] + self.domain['id'], ) config = {'ldap': {'url': uuid.uuid4().hex}} @@ -305,52 +376,66 @@ class DomainConfigTests(object): exception.DomainConfigNotFound, PROVIDERS.domain_config_api.get_config, self.domain['id'], - group='identity' + 
group='identity', ) self.assertRaises( exception.DomainConfigNotFound, PROVIDERS.domain_config_api.get_config, self.domain['id'], group='ldap', - option='user_tree_dn' + option='user_tree_dn', ) def test_get_sensitive_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) + self.domain['id'] + ) self.assertEqual({}, res) PROVIDERS.domain_config_api.create_config(self.domain['id'], config) res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) + self.domain['id'] + ) self.assertEqual(config, res) def test_update_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } PROVIDERS.domain_config_api.create_config(self.domain['id'], config) # Try updating a group - new_config = {'ldap': {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} + new_config = { + 'ldap': {'url': uuid.uuid4().hex, 'user_filter': uuid.uuid4().hex} + } res = PROVIDERS.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap') + self.domain['id'], new_config, group='ldap' + ) expected_config = copy.deepcopy(config) expected_config['ldap']['url'] = new_config['ldap']['url'] - expected_config['ldap']['user_filter'] = ( - new_config['ldap']['user_filter']) + expected_config['ldap']['user_filter'] = new_config['ldap'][ + 'user_filter' + ] expected_full_config = 
copy.deepcopy(expected_config) expected_config['ldap'].pop('password') res = PROVIDERS.domain_config_api.get_config(self.domain['id']) self.assertEqual(expected_config, res) # The sensitive option should still exist res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) + self.domain['id'] + ) self.assertEqual(expected_full_config, res) # Try updating a single whitelisted option @@ -358,7 +443,8 @@ class DomainConfigTests(object): PROVIDERS.domain_config_api.create_config(self.domain['id'], config) new_config = {'url': uuid.uuid4().hex} res = PROVIDERS.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap', option='url') + self.domain['id'], new_config, group='ldap', option='url' + ) # Make sure whitelisted and full config is updated expected_whitelisted_config = copy.deepcopy(config) @@ -369,7 +455,8 @@ class DomainConfigTests(object): res = PROVIDERS.domain_config_api.get_config(self.domain['id']) self.assertEqual(expected_whitelisted_config, res) res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) + self.domain['id'] + ) self.assertEqual(expected_full_config, res) # Try updating a single sensitive option @@ -377,7 +464,8 @@ class DomainConfigTests(object): PROVIDERS.domain_config_api.create_config(self.domain['id'], config) new_config = {'password': uuid.uuid4().hex} res = PROVIDERS.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap', option='password') + self.domain['id'], new_config, group='ldap', option='password' + ) # The whitelisted config should not have changed... 
expected_whitelisted_config = copy.deepcopy(config) expected_full_config = copy.deepcopy(config) @@ -387,116 +475,179 @@ class DomainConfigTests(object): self.assertEqual(expected_whitelisted_config, res) expected_full_config['ldap']['password'] = new_config['password'] res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) + self.domain['id'] + ) # ...but the sensitive piece should have. self.assertEqual(expected_full_config, res) def test_update_invalid_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } # An extra group, when specifying one group should fail - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.update_config, - self.domain['id'], config, group='ldap') + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.update_config, + self.domain['id'], + config, + group='ldap', + ) # An extra option, when specifying one option should fail - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.update_config, - self.domain['id'], config['ldap'], - group='ldap', option='url') + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.update_config, + self.domain['id'], + config['ldap'], + group='ldap', + option='url', + ) # Now try the right number of groups/options, but just not # ones that are in the config provided config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.update_config, - self.domain['id'], config, group='identity') - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.update_config, - 
self.domain['id'], config['ldap'], group='ldap', - option='url') + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.update_config, + self.domain['id'], + config, + group='identity', + ) + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.update_config, + self.domain['id'], + config['ldap'], + group='ldap', + option='url', + ) # Now some valid groups/options, but just not ones that are in the # existing config config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config(self.domain['id'], config) config_wrong_group = {'identity': {'driver': uuid.uuid4().hex}} - self.assertRaises(exception.DomainConfigNotFound, - PROVIDERS.domain_config_api.update_config, - self.domain['id'], config_wrong_group, - group='identity') + self.assertRaises( + exception.DomainConfigNotFound, + PROVIDERS.domain_config_api.update_config, + self.domain['id'], + config_wrong_group, + group='identity', + ) config_wrong_option = {'url': uuid.uuid4().hex} - self.assertRaises(exception.DomainConfigNotFound, - PROVIDERS.domain_config_api.update_config, - self.domain['id'], config_wrong_option, - group='ldap', option='url') + self.assertRaises( + exception.DomainConfigNotFound, + PROVIDERS.domain_config_api.update_config, + self.domain['id'], + config_wrong_option, + group='ldap', + option='url', + ) # And finally just some bad groups/options bad_group = uuid.uuid4().hex config = {bad_group: {'user': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.update_config, - self.domain['id'], config, group=bad_group, - option='user') + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.update_config, + self.domain['id'], + config, + group=bad_group, + option='user', + ) bad_option = uuid.uuid4().hex config = {'ldap': {bad_option: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - 
PROVIDERS.domain_config_api.update_config, - self.domain['id'], config, group='ldap', - option=bad_option) + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.update_config, + self.domain['id'], + config, + group='ldap', + option=bad_option, + ) def test_create_invalid_domain_config(self): - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.create_config, - self.domain['id'], {}) + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.create_config, + self.domain['id'], + {}, + ) config = {uuid.uuid4().hex: uuid.uuid4().hex} - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.create_config, - self.domain['id'], config) + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.create_config, + self.domain['id'], + config, + ) config = {uuid.uuid4().hex: {uuid.uuid4().hex: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.create_config, - self.domain['id'], config) + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.create_config, + self.domain['id'], + config, + ) config = {'ldap': {uuid.uuid4().hex: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.create_config, - self.domain['id'], config) + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.create_config, + self.domain['id'], + config, + ) # Try an option that IS in the standard conf, but neither whitelisted # or marked as sensitive config = {'identity': {'user_tree_dn': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - PROVIDERS.domain_config_api.create_config, - self.domain['id'], config) + self.assertRaises( + exception.InvalidDomainConfig, + PROVIDERS.domain_config_api.create_config, + self.domain['id'], + config, + ) def test_delete_invalid_partial_domain_config(self): config = 
{'ldap': {'url': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config(self.domain['id'], config) # Try deleting a group not in the config - self.assertRaises(exception.DomainConfigNotFound, - PROVIDERS.domain_config_api.delete_config, - self.domain['id'], group='identity') + self.assertRaises( + exception.DomainConfigNotFound, + PROVIDERS.domain_config_api.delete_config, + self.domain['id'], + group='identity', + ) # Try deleting an option not in the config - self.assertRaises(exception.DomainConfigNotFound, - PROVIDERS.domain_config_api.delete_config, - self.domain['id'], - group='ldap', option='user_tree_dn') + self.assertRaises( + exception.DomainConfigNotFound, + PROVIDERS.domain_config_api.delete_config, + self.domain['id'], + group='ldap', + option='user_tree_dn', + ) def test_sensitive_substitution_in_domain_config(self): # Create a config that contains a whitelisted option that requires # substitution of a sensitive option. - config = {'ldap': {'url': 'my_url/%(password)s', - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': 'my_url/%(password)s', + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } PROVIDERS.domain_config_api.create_config(self.domain['id'], config) # Read back the config with the internal method and ensure that the # substitution has taken place. 
res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - expected_url = ( - config['ldap']['url'] % {'password': config['ldap']['password']}) + self.domain['id'] + ) + expected_url = config['ldap']['url'] % { + 'password': config['ldap']['password'] + } self.assertEqual(expected_url, res['ldap']['url']) def test_invalid_sensitive_substitution_in_domain_config(self): @@ -504,17 +655,23 @@ class DomainConfigTests(object): mock_log = mock.Mock() invalid_option_config = { - 'ldap': {'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + 'ldap': { + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } - for invalid_option in ['my_url/%(passssword)s', - 'my_url/%(password', - 'my_url/%(password)', - 'my_url/%(password)d']: + for invalid_option in [ + 'my_url/%(passssword)s', + 'my_url/%(password', + 'my_url/%(password)', + 'my_url/%(password)d', + ]: invalid_option_config['ldap']['url'] = invalid_option PROVIDERS.domain_config_api.create_config( - self.domain['id'], invalid_option_config) + self.domain['id'], invalid_option_config + ) with mock.patch('keystone.resource.core.LOG', mock_log): res = ( @@ -524,60 +681,81 @@ class DomainConfigTests(object): ) mock_log.warning.assert_any_call(mock.ANY, mock.ANY) self.assertEqual( - invalid_option_config['ldap']['url'], res['ldap']['url']) + invalid_option_config['ldap']['url'], res['ldap']['url'] + ) def test_escaped_sequence_in_domain_config(self): """Check that escaped '%(' doesn't get interpreted.""" mock_log = mock.Mock() escaped_option_config = { - 'ldap': {'url': 'my_url/%%(password)s', - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + 'ldap': { + 'url': 'my_url/%%(password)s', + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } 
PROVIDERS.domain_config_api.create_config( - self.domain['id'], escaped_option_config) + self.domain['id'], escaped_option_config + ) with mock.patch('keystone.resource.core.LOG', mock_log): res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) + self.domain['id'] + ) self.assertFalse(mock_log.warn.called) # The escaping '%' should have been removed self.assertEqual('my_url/%(password)s', res['ldap']['url']) @unit.skip_if_cache_disabled('domain_config') def test_cache_layer_get_sensitive_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } PROVIDERS.domain_config_api.create_config(self.domain['id'], config) # cache the result res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) + self.domain['id'] + ) self.assertEqual(config, res) # delete, bypassing domain config manager api PROVIDERS.domain_config_api.delete_config_options(self.domain['id']) self.assertDictEqual( - res, PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id'])) + res, + PROVIDERS.domain_config_api.get_config_with_sensitive_info( + self.domain['id'] + ), + ) PROVIDERS.domain_config_api.get_config_with_sensitive_info.invalidate( - PROVIDERS.domain_config_api, self.domain['id']) + PROVIDERS.domain_config_api, self.domain['id'] + ) self.assertDictEqual( {}, PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domain['id'])) + self.domain['id'] + ), + ) def test_delete_domain_deletes_configs(self): """Test domain deletion clears the domain configs.""" domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': 
uuid.uuid4().hex, - 'password': uuid.uuid4().hex}} + config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + 'password': uuid.uuid4().hex, + } + } PROVIDERS.domain_config_api.create_config(domain['id'], config) # Now delete the domain @@ -589,64 +767,83 @@ class DomainConfigTests(object): self.assertRaises( exception.DomainConfigNotFound, PROVIDERS.domain_config_api.get_config, - domain['id']) + domain['id'], + ) # The get_config_with_sensitive_info does not throw an exception if # the config is empty, it just returns an empty dict self.assertDictEqual( {}, PROVIDERS.domain_config_api.get_config_with_sensitive_info( - domain['id'])) + domain['id'] + ), + ) def test_config_registration(self): type = uuid.uuid4().hex PROVIDERS.domain_config_api.obtain_registration( - self.domain['id'], type) + self.domain['id'], type + ) PROVIDERS.domain_config_api.release_registration( - self.domain['id'], type=type) + self.domain['id'], type=type + ) # Make sure that once someone has it, nobody else can get it. # This includes the domain who already has it. 
PROVIDERS.domain_config_api.obtain_registration( - self.domain['id'], type) + self.domain['id'], type + ) self.assertFalse( PROVIDERS.domain_config_api.obtain_registration( - self.domain['id'], type)) + self.domain['id'], type + ) + ) # Make sure we can read who does have it self.assertEqual( self.domain['id'], - PROVIDERS.domain_config_api.read_registration(type)) + PROVIDERS.domain_config_api.read_registration(type), + ) # Make sure releasing it is silent if the domain specified doesn't # have the registration domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} PROVIDERS.resource_api.create_domain(domain2['id'], domain2) PROVIDERS.domain_config_api.release_registration( - domain2['id'], type=type) + domain2['id'], type=type + ) # If nobody has the type registered, then trying to read it should # raise ConfigRegistrationNotFound PROVIDERS.domain_config_api.release_registration( - self.domain['id'], type=type) - self.assertRaises(exception.ConfigRegistrationNotFound, - PROVIDERS.domain_config_api.read_registration, - type) + self.domain['id'], type=type + ) + self.assertRaises( + exception.ConfigRegistrationNotFound, + PROVIDERS.domain_config_api.read_registration, + type, + ) # Finally check multiple registrations are cleared if you free the # registration without specifying the type type2 = uuid.uuid4().hex PROVIDERS.domain_config_api.obtain_registration( - self.domain['id'], type) + self.domain['id'], type + ) PROVIDERS.domain_config_api.obtain_registration( - self.domain['id'], type2) + self.domain['id'], type2 + ) PROVIDERS.domain_config_api.release_registration(self.domain['id']) - self.assertRaises(exception.ConfigRegistrationNotFound, - PROVIDERS.domain_config_api.read_registration, - type) - self.assertRaises(exception.ConfigRegistrationNotFound, - PROVIDERS.domain_config_api.read_registration, - type2) + self.assertRaises( + exception.ConfigRegistrationNotFound, + PROVIDERS.domain_config_api.read_registration, + type, + ) + self.assertRaises( + 
exception.ConfigRegistrationNotFound, + PROVIDERS.domain_config_api.read_registration, + type2, + ) def test_option_dict_fails_when_group_is_none(self): group = 'foo' @@ -655,7 +852,7 @@ class DomainConfigTests(object): cfg.NoSuchOptError, PROVIDERS.domain_config_api._option_dict, group, - option + option, ) def test_option_dict_returns_valid_config_values(self): @@ -666,7 +863,7 @@ class DomainConfigTests(object): expected_dict = { 'group': 'security_compliance', 'option': 'password_regex', - 'value': regex + 'value': regex, } option_dict = PROVIDERS.domain_config_api._option_dict( 'security_compliance', 'password_regex' diff --git a/keystone/tests/unit/rest.py b/keystone/tests/unit/rest.py index 378123575a..fb1fe4994f 100644 --- a/keystone/tests/unit/rest.py +++ b/keystone/tests/unit/rest.py @@ -61,18 +61,26 @@ class RestfulTestCase(unit.TestCase): self.load_backends() self.load_fixtures(default_fixtures) - self.public_app = webtest.TestApp( - self.loadapp(name='public')) + self.public_app = webtest.TestApp(self.loadapp(name='public')) self.addCleanup(delattr, self, 'public_app') def auth_plugin_config_override(self, methods=None, **method_classes): self.useFixture( - ksfixtures.ConfigAuthPlugins(self.config_fixture, - methods, - **method_classes)) + ksfixtures.ConfigAuthPlugins( + self.config_fixture, methods, **method_classes + ) + ) - def request(self, app, path, body=None, headers=None, token=None, - expected_status=None, **kwargs): + def request( + self, + app, + path, + body=None, + headers=None, + token=None, + expected_status=None, + **kwargs + ): if headers: headers = {str(k): str(v) for k, v in headers.items()} else: @@ -84,9 +92,9 @@ class RestfulTestCase(unit.TestCase): # sets environ['REMOTE_ADDR'] kwargs.setdefault('remote_addr', 'localhost') - response = app.request(path, headers=headers, - status=expected_status, body=body, - **kwargs) + response = app.request( + path, headers=headers, status=expected_status, body=body, **kwargs + ) return 
response @@ -102,8 +110,9 @@ class RestfulTestCase(unit.TestCase): """ self.assertTrue( 200 <= response.status_code <= 299, - 'Status code %d is outside of the expected range (2xx)\n\n%s' % - (response.status, response.body)) + 'Status code %d is outside of the expected range (2xx)\n\n%s' + % (response.status, response.body), + ) def assertResponseStatus(self, response, expected_status): """Assert a specific status code on the response. @@ -116,16 +125,19 @@ class RestfulTestCase(unit.TestCase): self.assertResponseStatus(response, http.client.NO_CONTENT) """ self.assertEqual( - expected_status, response.status_code, - 'Status code %s is not %s, as expected\n\n%s' % - (response.status_code, expected_status, response.body)) + expected_status, + response.status_code, + 'Status code %s is not %s, as expected\n\n%s' + % (response.status_code, expected_status, response.body), + ) def assertValidResponseHeaders(self, response): """Ensure that response headers appear as expected.""" self.assertIn('X-Auth-Token', response.headers.get('Vary')) - def assertValidErrorResponse(self, response, - expected_status=http.client.BAD_REQUEST): + def assertValidErrorResponse( + self, response, expected_status=http.client.BAD_REQUEST + ): """Verify that the error response is valid. Subclasses can override this function based on the expected response. @@ -164,9 +176,15 @@ class RestfulTestCase(unit.TestCase): else: response.result = response.body - def restful_request(self, method='GET', headers=None, body=None, - content_type=None, response_content_type=None, - **kwargs): + def restful_request( + self, + method='GET', + headers=None, + body=None, + content_type=None, + response_content_type=None, + **kwargs + ): """Serialize/deserialize json as request/response body. .. 
WARNING:: @@ -181,15 +199,18 @@ class RestfulTestCase(unit.TestCase): body = self._to_content_type(body, headers, content_type) # Perform the HTTP request/response - response = self.request(method=method, headers=headers, body=body, - **kwargs) + response = self.request( + method=method, headers=headers, body=body, **kwargs + ) response_content_type = response_content_type or content_type self._from_content_type(response, content_type=response_content_type) # we can save some code & improve coverage by always doing this - if (method != 'HEAD' and - response.status_code >= http.client.BAD_REQUEST): + if ( + method != 'HEAD' + and response.status_code >= http.client.BAD_REQUEST + ): self.assertValidErrorResponse(response) # Contains the decoded response.body diff --git a/keystone/tests/unit/server/test_keystone_flask.py b/keystone/tests/unit/server/test_keystone_flask.py index 644e6fa2c7..b90b736ca9 100644 --- a/keystone/tests/unit/server/test_keystone_flask.py +++ b/keystone/tests/unit/server/test_keystone_flask.py @@ -56,7 +56,8 @@ class _TestResourceWithCollectionInfo(flask_common.ResourceBase): def get(self, argument_id=None): # List with no argument, get resource with id, used for HEAD as well. 
rbac_enforcer.enforcer.RBACEnforcer.enforce_call( - action='example:allowed') + action='example:allowed' + ) if argument_id is None: # List return self._list_arguments() @@ -69,7 +70,8 @@ class _TestResourceWithCollectionInfo(flask_common.ResourceBase): def post(self): rbac_enforcer.enforcer.RBACEnforcer.enforce_call( - action='example:allowed') + action='example:allowed' + ) ref = flask.request.get_json(force=True) ref = self._assign_unique_id(ref) self._storage_dict[ref['id']] = ref @@ -77,7 +79,8 @@ class _TestResourceWithCollectionInfo(flask_common.ResourceBase): def put(self, argument_id): rbac_enforcer.enforcer.RBACEnforcer.enforce_call( - action='example:allowed') + action='example:allowed' + ) try: self._storage_dict[argument_id] except KeyError: @@ -91,7 +94,8 @@ class _TestResourceWithCollectionInfo(flask_common.ResourceBase): def patch(self, argument_id): rbac_enforcer.enforcer.RBACEnforcer.enforce_call( - action='example:allowed') + action='example:allowed' + ) try: self._storage_dict[argument_id] except KeyError: @@ -103,7 +107,8 @@ class _TestResourceWithCollectionInfo(flask_common.ResourceBase): def delete(self, argument_id): rbac_enforcer.enforcer.RBACEnforcer.enforce_call( - action='example:allowed') + action='example:allowed' + ) try: del self._storage_dict[argument_id] except KeyError: @@ -119,22 +124,17 @@ class _TestRestfulAPI(flask_common.APIBase): def __init__(self, *args, **kwargs): self.resource_mapping = kwargs.pop('resource_mapping', []) - self.resources = kwargs.pop('resources', - [_TestResourceWithCollectionInfo]) + self.resources = kwargs.pop( + 'resources', [_TestResourceWithCollectionInfo] + ) super(_TestRestfulAPI, self).__init__(*args, **kwargs) class TestKeystoneFlaskCommon(rest.RestfulTestCase): _policy_rules = [ - policy.RuleDefault( - name='example:allowed', - check_str='' - ), - policy.RuleDefault( - name='example:deny', - check_str='false:false' - ) + policy.RuleDefault(name='example:allowed', check_str=''), + 
policy.RuleDefault(name='example:deny', check_str='false:false'), ] def setUp(self): @@ -144,16 +144,20 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): def register_rules(enf_obj): enf_obj.register_defaults(self._policy_rules) - self.useFixture(fixtures.MockPatchObject( - enf, 'register_rules', register_rules)) - self.useFixture(fixtures.MockPatchObject( - rbac_enforcer.enforcer, '_POSSIBLE_TARGET_ACTIONS', - {r.name for r in self._policy_rules})) + self.useFixture( + fixtures.MockPatchObject(enf, 'register_rules', register_rules) + ) + self.useFixture( + fixtures.MockPatchObject( + rbac_enforcer.enforcer, + '_POSSIBLE_TARGET_ACTIONS', + {r.name for r in self._policy_rules}, + ) + ) enf._reset() self.addCleanup(enf._reset) - self.addCleanup( - _TestResourceWithCollectionInfo._reset) + self.addCleanup(_TestResourceWithCollectionInfo._reset) def _get_token(self): auth_json = { @@ -164,32 +168,31 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): 'user': { 'name': self.user_req_admin['name'], 'password': self.user_req_admin['password'], - 'domain': { - 'id': self.user_req_admin['domain_id'] - } + 'domain': {'id': self.user_req_admin['domain_id']}, } - } + }, }, - 'scope': { - 'project': { - 'id': self.project_service['id'] - } - } + 'scope': {'project': {'id': self.project_service['id']}}, } } - return self.test_client().post( - '/v3/auth/tokens', - json=auth_json, - expected_status_code=201).headers['X-Subject-Token'] + return ( + self.test_client() + .post('/v3/auth/tokens', json=auth_json, expected_status_code=201) + .headers['X-Subject-Token'] + ) def _setup_flask_restful_api(self, **options): self.restful_api_opts = options.copy() orig_value = _TestResourceWithCollectionInfo.api_prefix - setattr(_TestResourceWithCollectionInfo, - 'api_prefix', options.get('api_url_prefix', '')) - self.addCleanup(setattr, _TestResourceWithCollectionInfo, 'api_prefix', - orig_value) + setattr( + _TestResourceWithCollectionInfo, + 'api_prefix', + 
options.get('api_url_prefix', ''), + ) + self.addCleanup( + setattr, _TestResourceWithCollectionInfo, 'api_prefix', orig_value + ) self.restful_api = _TestRestfulAPI(**options) self.public_app.app.register_blueprint(self.restful_api.blueprint) self.cleanup_instance('restful_api') @@ -200,124 +203,143 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): api_prefix = self.restful_api_opts.get('api_url_prefix', '') blueprint_prefix = self.restful_api._blueprint_url_prefix.rstrip('/') url = ''.join( - [x for x in [blueprint_prefix, api_prefix, path_base] if x]) + [x for x in [blueprint_prefix, api_prefix, path_base] if x] + ) headers = {'X-Auth-Token': self._get_token()} with self.test_client() as c: # GET LIST resp = c.get(url, headers=headers) self.assertEqual( - _TestResourceWithCollectionInfo.wrap_collection( - []), resp.json) + _TestResourceWithCollectionInfo.wrap_collection([]), resp.json + ) unknown_id = uuid.uuid4().hex # GET non-existent ref - c.get('%s/%s' % (url, unknown_id), headers=headers, - expected_status_code=404) + c.get( + '%s/%s' % (url, unknown_id), + headers=headers, + expected_status_code=404, + ) # HEAD non-existent ref - c.head('%s/%s' % (url, unknown_id), headers=headers, - expected_status_code=404) + c.head( + '%s/%s' % (url, unknown_id), + headers=headers, + expected_status_code=404, + ) # PUT non-existent ref - c.put('%s/%s' % (url, unknown_id), json={}, headers=headers, - expected_status_code=404) + c.put( + '%s/%s' % (url, unknown_id), + json={}, + headers=headers, + expected_status_code=404, + ) # PATCH non-existent ref - c.patch('%s/%s' % (url, unknown_id), json={}, headers=headers, - expected_status_code=404) + c.patch( + '%s/%s' % (url, unknown_id), + json={}, + headers=headers, + expected_status_code=404, + ) # DELETE non-existent ref - c.delete('%s/%s' % (url, unknown_id), headers=headers, - expected_status_code=404) + c.delete( + '%s/%s' % (url, unknown_id), + headers=headers, + expected_status_code=404, + ) # POST new ref 
new_argument_resource = {'testing': uuid.uuid4().hex} new_argument_resp = c.post( - url, - json=new_argument_resource, - headers=headers).json['argument'] + url, json=new_argument_resource, headers=headers + ).json['argument'] # POST second new ref new_argument2_resource = {'testing': uuid.uuid4().hex} new_argument2_resp = c.post( - url, - json=new_argument2_resource, - headers=headers).json['argument'] + url, json=new_argument2_resource, headers=headers + ).json['argument'] # GET list get_list_resp = c.get(url, headers=headers).json - self.assertIn(new_argument_resp, - get_list_resp['arguments']) - self.assertIn(new_argument2_resp, - get_list_resp['arguments']) + self.assertIn(new_argument_resp, get_list_resp['arguments']) + self.assertIn(new_argument2_resp, get_list_resp['arguments']) # GET first ref - get_resp = c.get('%s/%s' % (url, new_argument_resp['id']), - headers=headers).json['argument'] + get_resp = c.get( + '%s/%s' % (url, new_argument_resp['id']), headers=headers + ).json['argument'] self.assertEqual(new_argument_resp, get_resp) # HEAD first ref head_resp = c.head( - '%s/%s' % (url, new_argument_resp['id']), - headers=headers).data + '%s/%s' % (url, new_argument_resp['id']), headers=headers + ).data # NOTE(morgan): For python3 compat, explicitly binary type self.assertEqual(head_resp, b'') # PUT update first ref replacement_argument = {'new_arg': True, 'id': uuid.uuid4().hex} - c.put('%s/%s' % (url, new_argument_resp['id']), headers=headers, - json=replacement_argument, expected_status_code=400) + c.put( + '%s/%s' % (url, new_argument_resp['id']), + headers=headers, + json=replacement_argument, + expected_status_code=400, + ) replacement_argument.pop('id') - c.put('%s/%s' % (url, new_argument_resp['id']), - headers=headers, - json=replacement_argument) - put_resp = c.get('%s/%s' % (url, new_argument_resp['id']), - headers=headers).json['argument'] - self.assertNotIn(new_argument_resp['testing'], - put_resp) + c.put( + '%s/%s' % (url, 
new_argument_resp['id']), + headers=headers, + json=replacement_argument, + ) + put_resp = c.get( + '%s/%s' % (url, new_argument_resp['id']), headers=headers + ).json['argument'] + self.assertNotIn(new_argument_resp['testing'], put_resp) self.assertTrue(put_resp['new_arg']) # GET first ref (check for replacement) get_replacement_resp = c.get( - '%s/%s' % (url, new_argument_resp['id']), - headers=headers).json['argument'] - self.assertEqual(put_resp, - get_replacement_resp) + '%s/%s' % (url, new_argument_resp['id']), headers=headers + ).json['argument'] + self.assertEqual(put_resp, get_replacement_resp) # PATCH update first ref patch_ref = {'uuid': uuid.uuid4().hex} - patch_resp = c.patch('%s/%s' % (url, new_argument_resp['id']), - headers=headers, - json=patch_ref).json['argument'] + patch_resp = c.patch( + '%s/%s' % (url, new_argument_resp['id']), + headers=headers, + json=patch_ref, + ).json['argument'] self.assertTrue(patch_resp['new_arg']) self.assertEqual(patch_ref['uuid'], patch_resp['uuid']) # GET first ref (check for update) get_patched_ref_resp = c.get( - '%s/%s' % (url, new_argument_resp['id']), - headers=headers).json['argument'] - self.assertEqual(patch_resp, - get_patched_ref_resp) + '%s/%s' % (url, new_argument_resp['id']), headers=headers + ).json['argument'] + self.assertEqual(patch_resp, get_patched_ref_resp) # DELETE first ref - c.delete( - '%s/%s' % (url, new_argument_resp['id']), - headers=headers) + c.delete('%s/%s' % (url, new_argument_resp['id']), headers=headers) # Check that it was in-fact deleted c.get( '%s/%s' % (url, new_argument_resp['id']), - headers=headers, expected_status_code=404) + headers=headers, + expected_status_code=404, + ) def test_api_url_prefix(self): url_prefix = '/%s' % uuid.uuid4().hex - self._setup_flask_restful_api( - api_url_prefix=url_prefix) + self._setup_flask_restful_api(api_url_prefix=url_prefix) self._make_requests() def test_blueprint_url_prefix(self): url_prefix = '/%s' % uuid.uuid4().hex - 
self._setup_flask_restful_api( - blueprint_url_prefix=url_prefix) + self._setup_flask_restful_api(blueprint_url_prefix=url_prefix) self._make_requests() def test_build_restful_api_no_prefix(self): @@ -370,7 +392,8 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): functions = functions or [] functions.append(do_something) super(TestAPI, self)._register_before_request_functions( - functions) + functions + ) api = TestAPI(resources=[_TestResourceWithCollectionInfo]) self.public_app.app.register_blueprint(api.blueprint) @@ -394,29 +417,36 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): functions = functions or [] functions.append(do_something) super(TestAPI, self)._register_after_request_functions( - functions) + functions + ) api = TestAPI(resources=[_TestResourceWithCollectionInfo]) self.public_app.app.register_blueprint(api.blueprint) token = self._get_token() with self.test_client() as c: - c.get('/v3/arguments', headers={'X-Auth-Token': token}, - expected_status_code=420) + c.get( + '/v3/arguments', + headers={'X-Auth-Token': token}, + expected_status_code=420, + ) def test_construct_resource_map(self): resource_name = 'arguments' - param_relation = json_home.build_v3_parameter_relation( - 'argument_id') + param_relation = json_home.build_v3_parameter_relation('argument_id') alt_rel_func = functools.partial( json_home.build_v3_extension_resource_relation, - extension_name='extension', extension_version='1.0') + extension_name='extension', + extension_version='1.0', + ) url = '/v3/arguments/' - old_url = [dict( - url='/v3/old_arguments/', - json_home=flask_common.construct_json_home_data( - rel='arguments', - resource_relation_func=alt_rel_func) - )] + old_url = [ + dict( + url='/v3/old_arguments/', + json_home=flask_common.construct_json_home_data( + rel='arguments', resource_relation_func=alt_rel_func + ), + ) + ] mapping = flask_common.construct_resource_map( resource=_TestResourceWithCollectionInfo, @@ -426,16 +456,20 @@ class 
TestKeystoneFlaskCommon(rest.RestfulTestCase): rel=resource_name, status=json_home.Status.EXPERIMENTAL, path_vars={'argument_id': param_relation}, - resource_relation_func=json_home.build_v3_resource_relation) - self.assertEqual(_TestResourceWithCollectionInfo, - mapping.resource) + resource_relation_func=json_home.build_v3_resource_relation, + ) + self.assertEqual(_TestResourceWithCollectionInfo, mapping.resource) self.assertEqual(url, mapping.url) - self.assertEqual(json_home.build_v3_resource_relation(resource_name), - mapping.json_home_data.rel) - self.assertEqual(json_home.Status.EXPERIMENTAL, - mapping.json_home_data.status) - self.assertEqual({'argument_id': param_relation}, - mapping.json_home_data.path_vars) + self.assertEqual( + json_home.build_v3_resource_relation(resource_name), + mapping.json_home_data.rel, + ) + self.assertEqual( + json_home.Status.EXPERIMENTAL, mapping.json_home_data.status + ) + self.assertEqual( + {'argument_id': param_relation}, mapping.json_home_data.path_vars + ) # Check the alternate URL data is populated sanely self.assertEqual(1, len(mapping.alternate_urls)) alt_url_data = mapping.alternate_urls[0] @@ -446,7 +480,8 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): # Test that automatic instantiation and registration to app works. 
self.restful_api_opts = {} self.restful_api = _TestRestfulAPI.instantiate_and_register_to_app( - self.public_app.app) + self.public_app.app + ) self.cleanup_instance('restful_api_opts') self.cleanup_instance('restful_api') self._make_requests() @@ -468,17 +503,20 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): rel='test', status=json_home.Status.STABLE, path_vars=None, - resource_relation_func=json_home.build_v3_resource_relation) + resource_relation_func=json_home.build_v3_resource_relation, + ) - restful_api = _TestRestfulAPI(resource_mapping=[resource_map], - resources=[]) + restful_api = _TestRestfulAPI( + resource_mapping=[resource_map], resources=[] + ) self.public_app.app.register_blueprint(restful_api.blueprint) token = self._get_token() with self.test_client() as c: body = {'test_value': uuid.uuid4().hex} # Works with token - resp = c.post('/v3/test_api', json=body, - headers={'X-Auth-Token': token}) + resp = c.post( + '/v3/test_api', json=body, headers={'X-Auth-Token': token} + ) self.assertEqual(body, resp.json['post_body']) # Works without token resp = c.post('/v3/test_api', json=body) @@ -503,10 +541,12 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): rel='test', status=json_home.Status.STABLE, path_vars=None, - resource_relation_func=json_home.build_v3_resource_relation) + resource_relation_func=json_home.build_v3_resource_relation, + ) - restful_api = _TestRestfulAPI(resource_mapping=[resource_map], - resources=[]) + restful_api = _TestRestfulAPI( + resource_mapping=[resource_map], resources=[] + ) self.public_app.app.register_blueprint(restful_api.blueprint) with self.test_client() as c: r = c.options('/v3/test_api') @@ -516,8 +556,13 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): # byte-string. `Content-Length` will be 0. 
self.assertEqual( set(['OPTIONS', 'POST']), - set([v.lstrip().rstrip() - for v in r.headers['Allow'].split(',')])) + set( + [ + v.lstrip().rstrip() + for v in r.headers['Allow'].split(',') + ] + ), + ) self.assertEqual(r.headers['Content-Length'], '0') self.assertEqual(r.data, b'') @@ -527,7 +572,8 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): class MappedResource(flask_restful.Resource): def post(self): rbac_enforcer.enforcer.RBACEnforcer().enforce_call( - action='example:allowed') + action='example:allowed' + ) post_body = flask.request.get_json() return {'post_body': post_body}, 201 @@ -539,23 +585,27 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): rel='test', status=json_home.Status.STABLE, path_vars=None, - resource_relation_func=json_home.build_v3_resource_relation) + resource_relation_func=json_home.build_v3_resource_relation, + ) - restful_api = _TestRestfulAPI(resource_mapping=[resource_map], - resources=[]) + restful_api = _TestRestfulAPI( + resource_mapping=[resource_map], resources=[] + ) self.public_app.app.register_blueprint(restful_api.blueprint) token = self._get_token() with self.test_client() as c: body = {'test_value': uuid.uuid4().hex} - resp = c.post('/v3/test_api', json=body, - headers={'X-Auth-Token': token}) + resp = c.post( + '/v3/test_api', json=body, headers={'X-Auth-Token': token} + ) self.assertEqual(body, resp.json['post_body']) def test_correct_json_home_document(self): class MappedResource(flask_restful.Resource): def post(self): rbac_enforcer.enforcer.RBACEnforcer().enforce_call( - action='example:allowed') + action='example:allowed' + ) post_body = flask.request.get_json() return {'post_body': post_body} @@ -567,17 +617,13 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): 'href-template': '/v3/arguments/{argument_id}', 'href-vars': { 'argument_id': 'https://docs.openstack.org/api/' - 'openstack-identity/3/param/argument_id' - } + 'openstack-identity/3/param/argument_id' + }, }, 
'https://docs.openstack.org/api/openstack-identity/3/' - 'rel/arguments': { - 'href': '/v3/arguments' - }, + 'rel/arguments': {'href': '/v3/arguments'}, 'https://docs.openstack.org/api/openstack-identity/3/' - 'rel/test': { - 'href': '/v3/test_api' - }, + 'rel/test': {'href': '/v3/test_api'}, } resource_map = flask_common.construct_resource_map( @@ -588,7 +634,8 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): rel='test', status=json_home.Status.STABLE, path_vars=None, - resource_relation_func=json_home.build_v3_resource_relation) + resource_relation_func=json_home.build_v3_resource_relation, + ) restful_api = _TestRestfulAPI(resource_mapping=[resource_map]) self.public_app.app.register_blueprint(restful_api.blueprint) @@ -598,8 +645,10 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): resp = c.get('/', headers=headers) resp_data = jsonutils.loads(resp.data) for rel in json_home_data: - self.assertThat(resp_data['resources'][rel], - matchers.Equals(json_home_data[rel])) + self.assertThat( + resp_data['resources'][rel], + matchers.Equals(json_home_data[rel]), + ) def test_normalize_domain_id_extracts_domain_id_if_needed(self): self._setup_flask_restful_api() @@ -611,11 +660,13 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): with self.test_client() as c: # Make a dummy request.. ANY request is fine to push the whole # context stack. 
- c.get('%s/%s' % (url, uuid.uuid4().hex), headers=headers, - expected_status_code=404) + c.get( + '%s/%s' % (url, uuid.uuid4().hex), + headers=headers, + expected_status_code=404, + ) - oslo_context = flask.request.environ[ - context.REQUEST_CONTEXT_ENV] + oslo_context = flask.request.environ[context.REQUEST_CONTEXT_ENV] # Normal Project Scope Form # --------------------------- @@ -626,10 +677,12 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): self.assertEqual(domain_id, ref_with_domain_id['domain_id']) # Ensure (deprecated) we add default domain if needed flask_common.ResourceBase._normalize_domain_id( - ref_without_domain_id) + ref_without_domain_id + ) self.assertEqual( CONF.identity.default_domain_id, - ref_without_domain_id['domain_id']) + ref_without_domain_id['domain_id'], + ) ref_without_domain_id.clear() # Domain Scoped Form @@ -642,9 +695,11 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): flask_common.ResourceBase._normalize_domain_id(ref_with_domain_id) self.assertEqual(domain_id, ref_with_domain_id['domain_id']) flask_common.ResourceBase._normalize_domain_id( - ref_without_domain_id) - self.assertEqual(oslo_context.domain_id, - ref_without_domain_id['domain_id']) + ref_without_domain_id + ) + self.assertEqual( + oslo_context.domain_id, ref_without_domain_id['domain_id'] + ) ref_without_domain_id.clear() # "Admin" Token form @@ -658,9 +713,11 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): self.assertEqual(domain_id, ref_with_domain_id['domain_id']) # Ensure we raise an appropriate exception with the inferred # domain_id - self.assertRaises(exception.ValidationError, - flask_common.ResourceBase._normalize_domain_id, - ref=ref_without_domain_id) + self.assertRaises( + exception.ValidationError, + flask_common.ResourceBase._normalize_domain_id, + ref=ref_without_domain_id, + ) def test_api_prefix_self_referential_link_substitution(self): @@ -673,8 +730,8 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): # therefore we don't 
need the heavy lifting of a full request # run. with self.test_request_context( - path='/%s/nothing/values' % view_arg, - base_url='https://localhost/'): + path='/%s/nothing/values' % view_arg, base_url='https://localhost/' + ): # explicitly set the view_args, this is a special case # for a synthetic test case, usually one would rely on # a full request stack to set these. @@ -685,28 +742,33 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): # add the self referential link TestResource._add_self_referential_link( - ref, collection_name='values') + ref, collection_name='values' + ) # Check that the link in fact starts with what we expect # including the explicit view arg. - self.assertTrue(ref['links']['self'].startswith( - 'https://localhost/v3/%s' % view_arg) + self.assertTrue( + ref['links']['self'].startswith( + 'https://localhost/v3/%s' % view_arg + ) ) def test_json_body_before_req_func_valid_json(self): with self.test_request_context( - headers={'Content-Type': 'application/json'}, - data='{"key": "value"}'): + headers={'Content-Type': 'application/json'}, + data='{"key": "value"}', + ): # No exception should be raised, everything is happy. 
json_body.json_body_before_request() def test_json_body_before_req_func_invalid_json(self): with self.test_request_context( - headers={'Content-Type': 'application/json'}, - data='invalid JSON'): + headers={'Content-Type': 'application/json'}, data='invalid JSON' + ): # keystone.exception.ValidationError should be raised - self.assertRaises(exception.ValidationError, - json_body.json_body_before_request) + self.assertRaises( + exception.ValidationError, json_body.json_body_before_request + ) def test_json_body_before_req_func_no_content_type(self): # Unset @@ -716,21 +778,25 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): # Explicitly set to '' with self.test_request_context( - headers={'Content-Type': ''}, data='{"key": "value"}'): + headers={'Content-Type': ''}, data='{"key": "value"}' + ): # No exception should be raised, everything is happy. json_body.json_body_before_request() def test_json_body_before_req_func_unrecognized_content_type(self): with self.test_request_context( - headers={'Content-Type': 'unrecognized/content-type'}, - data='{"key": "value"'): + headers={'Content-Type': 'unrecognized/content-type'}, + data='{"key": "value"', + ): # keystone.exception.ValidationError should be raised - self.assertRaises(exception.ValidationError, - json_body.json_body_before_request) + self.assertRaises( + exception.ValidationError, json_body.json_body_before_request + ) def test_json_body_before_req_func_unrecognized_conten_type_no_body(self): with self.test_request_context( - headers={'Content-Type': 'unrecognized/content-type'}): + headers={'Content-Type': 'unrecognized/content-type'} + ): # No exception should be raised, everything is happy. 
json_body.json_body_before_request() @@ -745,8 +811,7 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): self.assertRaises(ValueError, getattr, r, 'collection_key') r = TestResourceWithKey() - self.assertEqual( - TestResourceWithKey.collection_key, r.collection_key) + self.assertEqual(TestResourceWithKey.collection_key, r.collection_key) def test_resource_member_key_raises_exception_if_unset(self): class TestResource(flask_common.ResourceBase): @@ -759,8 +824,7 @@ class TestKeystoneFlaskCommon(rest.RestfulTestCase): self.assertRaises(ValueError, getattr, r, 'member_key') r = TestResourceWithKey() - self.assertEqual( - TestResourceWithKey.member_key, r.member_key) + self.assertEqual(TestResourceWithKey.member_key, r.member_key) class TestKeystoneFlaskUnrouted404(rest.RestfulTestCase): diff --git a/keystone/tests/unit/test_app_config.py b/keystone/tests/unit/test_app_config.py index 9200d59f40..160aeb81d3 100644 --- a/keystone/tests/unit/test_app_config.py +++ b/keystone/tests/unit/test_app_config.py @@ -32,8 +32,7 @@ class AppConfigTest(unit.TestCase): self.assertListEqual(config_files, expected_config_files) def test_config_files_have_default_values_with_empty_envars(self): - env = {'OS_KEYSTONE_CONFIG_FILES': '', - 'OS_KEYSTONE_CONFIG_DIR': ''} + env = {'OS_KEYSTONE_CONFIG_FILES': '', 'OS_KEYSTONE_CONFIG_DIR': ''} config_files = server_flask._get_config_files(env) config_files.sort() expected_config_files = [] @@ -67,8 +66,10 @@ class AppConfigTest(unit.TestCase): self.assertListEqual(config_files, [cfgpath]) def test_can_use_multiple_absolute_path_config_files(self): - cfgpaths = [os.path.join(self.custom_config_dir, cfg) - for cfg in self.custom_config_files] + cfgpaths = [ + os.path.join(self.custom_config_dir, cfg) + for cfg in self.custom_config_files + ] cfgpaths.sort() env = {'OS_KEYSTONE_CONFIG_FILES': ';'.join(cfgpaths)} config_files = server_flask._get_config_files(env) @@ -84,44 +85,57 @@ class AppConfigTest(unit.TestCase): env = 
{'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir} config_files = server_flask._get_config_files(env) config_files.sort() - expected_config_files = [os.path.join(self.custom_config_dir, - self.default_config_file)] + expected_config_files = [ + os.path.join(self.custom_config_dir, self.default_config_file) + ] self.assertListEqual(config_files, expected_config_files) def test_can_use_single_config_file_under_custom_config_dir(self): cfg = self.custom_config_files[0] - env = {'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir, - 'OS_KEYSTONE_CONFIG_FILES': cfg} + env = { + 'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir, + 'OS_KEYSTONE_CONFIG_FILES': cfg, + } config_files = server_flask._get_config_files(env) config_files.sort() expected_config_files = [os.path.join(self.custom_config_dir, cfg)] self.assertListEqual(config_files, expected_config_files) def test_can_use_multiple_config_files_under_custom_config_dir(self): - env = {'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir, - 'OS_KEYSTONE_CONFIG_FILES': ';'.join(self.custom_config_files)} + env = { + 'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir, + 'OS_KEYSTONE_CONFIG_FILES': ';'.join(self.custom_config_files), + } config_files = server_flask._get_config_files(env) config_files.sort() - expected_config_files = [os.path.join(self.custom_config_dir, s) - for s in self.custom_config_files] + expected_config_files = [ + os.path.join(self.custom_config_dir, s) + for s in self.custom_config_files + ] expected_config_files.sort() self.assertListEqual(config_files, expected_config_files) config_with_empty_strings = self.custom_config_files + ['', ' '] - env = {'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir, - 'OS_KEYSTONE_CONFIG_FILES': ';'.join(config_with_empty_strings)} + env = { + 'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir, + 'OS_KEYSTONE_CONFIG_FILES': ';'.join(config_with_empty_strings), + } config_files = server_flask._get_config_files(env) config_files.sort() self.assertListEqual(config_files, 
expected_config_files) def test_can_mix_relative_and_absolute_paths_config_file(self): cfg0 = self.custom_config_files[0] - cfgpath0 = os.path.join(self.custom_config_dir, - self.custom_config_files[0]) - cfgpath1 = os.path.join(self.custom_config_dir, - self.custom_config_files[1]) - env = {'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir, - 'OS_KEYSTONE_CONFIG_FILES': ';'.join([cfg0, cfgpath1])} + cfgpath0 = os.path.join( + self.custom_config_dir, self.custom_config_files[0] + ) + cfgpath1 = os.path.join( + self.custom_config_dir, self.custom_config_files[1] + ) + env = { + 'OS_KEYSTONE_CONFIG_DIR': self.custom_config_dir, + 'OS_KEYSTONE_CONFIG_FILES': ';'.join([cfg0, cfgpath1]), + } config_files = server_flask._get_config_files(env) config_files.sort() expected_config_files = [cfgpath0, cfgpath1] diff --git a/keystone/tests/unit/test_associate_project_endpoint_extension.py b/keystone/tests/unit/test_associate_project_endpoint_extension.py index 78673f21ac..b45e048a6b 100644 --- a/keystone/tests/unit/test_associate_project_endpoint_extension.py +++ b/keystone/tests/unit/test_associate_project_endpoint_extension.py @@ -31,9 +31,12 @@ class EndpointFilterTestCase(test_v3.RestfulTestCase): super(EndpointFilterTestCase, self).setUp() self.default_request_url = ( '/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { + '/endpoints/%(endpoint_id)s' + % { 'project_id': self.default_domain_project_id, - 'endpoint_id': self.endpoint_id}) + 'endpoint_id': self.endpoint_id, + } + ) class EndpointFilterCRUDTestCase(EndpointFilterTestCase): @@ -52,11 +55,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): Invalid project id test case. 
""" - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': uuid.uuid4().hex, - 'endpoint_id': self.endpoint_id}, - expected_status=http.client.NOT_FOUND) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': uuid.uuid4().hex, + 'endpoint_id': self.endpoint_id, + }, + expected_status=http.client.NOT_FOUND, + ) def test_create_endpoint_project_association_with_invalid_endpoint(self): """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. @@ -64,11 +71,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): Invalid endpoint id test case. """ - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': uuid.uuid4().hex}, - expected_status=http.client.NOT_FOUND) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': uuid.uuid4().hex, + }, + expected_status=http.client.NOT_FOUND, + ) def test_create_endpoint_project_association_with_unexpected_body(self): """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. @@ -76,8 +87,10 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): Unexpected body in request. The body should be ignored. """ - self.put(self.default_request_url, - body={'project_id': self.default_domain_project_id}) + self.put( + self.default_request_url, + body={'project_id': self.default_domain_project_id}, + ) def test_check_endpoint_project_association(self): """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. 
@@ -86,11 +99,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.head('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': self.endpoint_id}, - expected_status=http.client.NO_CONTENT) + self.head( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': self.endpoint_id, + }, + expected_status=http.client.NO_CONTENT, + ) def test_check_endpoint_project_association_with_invalid_project(self): """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. @@ -99,11 +116,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.head('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': uuid.uuid4().hex, - 'endpoint_id': self.endpoint_id}, - expected_status=http.client.NOT_FOUND) + self.head( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': uuid.uuid4().hex, + 'endpoint_id': self.endpoint_id, + }, + expected_status=http.client.NOT_FOUND, + ) def test_check_endpoint_project_association_with_invalid_endpoint(self): """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. 
@@ -112,11 +133,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.head('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': uuid.uuid4().hex}, - expected_status=http.client.NOT_FOUND) + self.head( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': uuid.uuid4().hex, + }, + expected_status=http.client.NOT_FOUND, + ) def test_get_endpoint_project_association(self): """GET /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. @@ -125,11 +150,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.get('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': self.endpoint_id}, - expected_status=http.client.NO_CONTENT) + self.get( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': self.endpoint_id, + }, + expected_status=http.client.NO_CONTENT, + ) def test_get_endpoint_project_association_with_invalid_project(self): """GET /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. 
@@ -138,11 +167,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.get('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': uuid.uuid4().hex, - 'endpoint_id': self.endpoint_id}, - expected_status=http.client.NOT_FOUND) + self.get( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': uuid.uuid4().hex, + 'endpoint_id': self.endpoint_id, + }, + expected_status=http.client.NOT_FOUND, + ) def test_get_endpoint_project_association_with_invalid_endpoint(self): """GET /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. @@ -151,11 +184,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.get('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': uuid.uuid4().hex}, - expected_status=http.client.NOT_FOUND) + self.get( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': uuid.uuid4().hex, + }, + expected_status=http.client.NOT_FOUND, + ) def test_list_endpoints_associated_with_valid_project(self): """GET & HEAD /OS-EP-FILTER/projects/{project_id}/endpoints. 
@@ -165,10 +202,12 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) resource_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { - 'project_id': self.default_domain_project_id} + 'project_id': self.default_domain_project_id + } r = self.get(resource_url) - self.assertValidEndpointListResponse(r, self.endpoint, - resource_url=resource_url) + self.assertValidEndpointListResponse( + r, self.endpoint, resource_url=resource_url + ) self.head(resource_url, expected_status=http.client.OK) def test_list_endpoints_associated_with_invalid_project(self): @@ -178,9 +217,9 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { - 'project_id': uuid.uuid4().hex} - ) + url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { + 'project_id': uuid.uuid4().hex + } self.get(url, expected_status=http.client.NOT_FOUND) self.head(url, expected_status=http.client.NOT_FOUND) @@ -192,10 +231,12 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) resource_url = '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % { - 'endpoint_id': self.endpoint_id} + 'endpoint_id': self.endpoint_id + } r = self.get(resource_url, expected_status=http.client.OK) - self.assertValidProjectListResponse(r, self.default_domain_project, - resource_url=resource_url) + self.assertValidProjectListResponse( + r, self.default_domain_project, resource_url=resource_url + ) self.head(resource_url, expected_status=http.client.OK) def test_list_projects_with_no_endpoint_project_association(self): @@ -204,10 +245,9 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): Valid endpoint id but no endpoint-project associations test case. 
""" - url = ( - '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % - {'endpoint_id': self.endpoint_id} - ) + url = '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % { + 'endpoint_id': self.endpoint_id + } r = self.get(url, expected_status=http.client.OK) self.assertValidProjectListResponse(r, expected_length=0) self.head(url, expected_status=http.client.OK) @@ -218,10 +258,9 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): Invalid endpoint id test case. """ - url = ( - '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % - {'endpoint_id': uuid.uuid4().hex} - ) + url = '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % { + 'endpoint_id': uuid.uuid4().hex + } self.get(url, expected_status=http.client.NOT_FOUND) self.head(url, expected_status=http.client.NOT_FOUND) @@ -232,10 +271,14 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.delete('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': self.endpoint_id}) + self.delete( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': self.endpoint_id, + } + ) def test_remove_endpoint_project_association_with_invalid_project(self): """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. 
@@ -244,11 +287,15 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.delete('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': uuid.uuid4().hex, - 'endpoint_id': self.endpoint_id}, - expected_status=http.client.NOT_FOUND) + self.delete( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': uuid.uuid4().hex, + 'endpoint_id': self.endpoint_id, + }, + expected_status=http.client.NOT_FOUND, + ) def test_remove_endpoint_project_association_with_invalid_endpoint(self): """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}. @@ -257,21 +304,29 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): """ self.put(self.default_request_url) - self.delete('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': uuid.uuid4().hex}, - expected_status=http.client.NOT_FOUND) + self.delete( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': uuid.uuid4().hex, + }, + expected_status=http.client.NOT_FOUND, + ) def test_endpoint_project_association_cleanup_when_project_deleted(self): self.put(self.default_request_url) - association_url = ('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % - {'endpoint_id': self.endpoint_id}) + association_url = ( + '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' + % {'endpoint_id': self.endpoint_id} + ) r = self.get(association_url) self.assertValidProjectListResponse(r, expected_length=1) - self.delete('/projects/%(project_id)s' % { - 'project_id': self.default_domain_project_id}) + self.delete( + '/projects/%(project_id)s' + % {'project_id': self.default_domain_project_id} + ) r = self.get(association_url) self.assertValidProjectListResponse(r, expected_length=0) @@ -279,12 +334,14 @@ class 
EndpointFilterCRUDTestCase(EndpointFilterTestCase): def test_endpoint_project_association_cleanup_when_endpoint_deleted(self): self.put(self.default_request_url) association_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { - 'project_id': self.default_domain_project_id} + 'project_id': self.default_domain_project_id + } r = self.get(association_url) self.assertValidEndpointListResponse(r, expected_length=1) - self.delete('/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}) + self.delete( + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint_id} + ) r = self.get(association_url) self.assertValidEndpointListResponse(r, expected_length=0) @@ -295,10 +352,12 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): # default project, this should be done at first since # `create_endpoint` will also invalidate cache. endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='public', - id=endpoint_id2) + endpoint2 = unit.new_endpoint_ref( + service_id=self.service_id, + region_id=self.region_id, + interface='public', + id=endpoint_id2, + ) PROVIDERS.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy()) # create endpoint project association. @@ -307,8 +366,8 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): # should get back only one endpoint that was just created. user_id = uuid.uuid4().hex catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) # there is only one endpoints associated with the default project. self.assertEqual(1, len(catalog[0]['endpoints'])) @@ -317,106 +376,124 @@ class EndpointFilterCRUDTestCase(EndpointFilterTestCase): # add the second endpoint to default project, bypassing # catalog_api API manager. 
PROVIDERS.catalog_api.driver.add_endpoint_to_project( - endpoint_id2, - self.default_domain_project_id) + endpoint_id2, self.default_domain_project_id + ) # but, we can just get back one endpoint from the cache, since the # catalog is pulled out from cache and its haven't been invalidated. catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertEqual(1, len(catalog[0]['endpoints'])) # remove the endpoint2 from the default project, and add it again via # catalog_api API manager. PROVIDERS.catalog_api.driver.remove_endpoint_from_project( - endpoint_id2, - self.default_domain_project_id) + endpoint_id2, self.default_domain_project_id + ) # add second endpoint to default project, this can be done by calling # the catalog_api API manager directly but call the REST API # instead for consistency. - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': endpoint_id2}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': endpoint_id2, + } + ) # should get back two endpoints since the cache has been # invalidated when the second endpoint was added to default project. 
catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertEqual(2, len(catalog[0]['endpoints'])) - ep_id_list = [catalog[0]['endpoints'][0]['id'], - catalog[0]['endpoints'][1]['id']] + ep_id_list = [ + catalog[0]['endpoints'][0]['id'], + catalog[0]['endpoints'][1]['id'], + ] self.assertCountEqual([self.endpoint_id, endpoint_id2], ep_id_list) @unit.skip_if_cache_disabled('catalog') def test_remove_endpoint_from_project_invalidates_cache(self): endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='public', - id=endpoint_id2) + endpoint2 = unit.new_endpoint_ref( + service_id=self.service_id, + region_id=self.region_id, + interface='public', + id=endpoint_id2, + ) PROVIDERS.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy()) # create endpoint project association. self.put(self.default_request_url) # add second endpoint to default project. - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': endpoint_id2}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': endpoint_id2, + } + ) # should get back only one endpoint that was just created. user_id = uuid.uuid4().hex catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) # there are two endpoints associated with the default project. 
- ep_id_list = [catalog[0]['endpoints'][0]['id'], - catalog[0]['endpoints'][1]['id']] + ep_id_list = [ + catalog[0]['endpoints'][0]['id'], + catalog[0]['endpoints'][1]['id'], + ] self.assertEqual(2, len(catalog[0]['endpoints'])) self.assertCountEqual([self.endpoint_id, endpoint_id2], ep_id_list) # remove the endpoint2 from the default project, bypassing # catalog_api API manager. PROVIDERS.catalog_api.driver.remove_endpoint_from_project( - endpoint_id2, - self.default_domain_project_id) + endpoint_id2, self.default_domain_project_id + ) # but, we can just still get back two endpoints from the cache, # since the catalog is pulled out from cache and its haven't # been invalidated. catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertEqual(2, len(catalog[0]['endpoints'])) # add back the endpoint2 to the default project, and remove it by # catalog_api API manage. PROVIDERS.catalog_api.driver.add_endpoint_to_project( - endpoint_id2, - self.default_domain_project_id) + endpoint_id2, self.default_domain_project_id + ) # remove the endpoint2 from the default project, this can be done # by calling the catalog_api API manager directly but call # the REST API instead for consistency. - self.delete('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': endpoint_id2}) + self.delete( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.default_domain_project_id, + 'endpoint_id': endpoint_id2, + } + ) # should only get back one endpoint since the cache has been # invalidated after the endpoint project association was removed. 
catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertEqual(1, len(catalog[0]['endpoints'])) self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id']) @@ -433,56 +510,62 @@ class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase): # grant the user a role on the project self.put( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { + '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' + % { 'user_id': self.user['id'], 'project_id': project['id'], - 'role_id': self.role['id']}) + 'role_id': self.role['id'], + } + ) # set the user's preferred project body = {'user': {'default_project_id': project['id']}} - r = self.patch('/users/%(user_id)s' % { - 'user_id': self.user['id']}, - body=body) + r = self.patch( + '/users/%(user_id)s' % {'user_id': self.user['id']}, body=body + ) self.assertValidUserResponse(r) # add one endpoint to the project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': project['id'], - 'endpoint_id': self.endpoint_id}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % {'project_id': project['id'], 'endpoint_id': self.endpoint_id} + ) # attempt to authenticate without requesting a project auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.post('/auth/tokens', body=auth_data) self.assertValidProjectScopedTokenResponse( - r, - require_catalog=True, - endpoint_filter=True, - ep_filter_assoc=1) + r, require_catalog=True, endpoint_filter=True, ep_filter_assoc=1 + ) self.assertEqual(project['id'], r.result['token']['project']['id']) def test_default_scoped_token_using_endpoint_filter(self): """Verify endpoints from default scoped token filtered.""" # add one endpoint to default project - 
self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.project['id'], + 'endpoint_id': self.endpoint_id, + } + ) auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.post('/auth/tokens', body=auth_data) self.assertValidProjectScopedTokenResponse( - r, - require_catalog=True, - endpoint_filter=True, - ep_filter_assoc=1) - self.assertEqual(self.project['id'], - r.result['token']['project']['id']) + r, require_catalog=True, endpoint_filter=True, ep_filter_assoc=1 + ) + self.assertEqual( + self.project['id'], r.result['token']['project']['id'] + ) # Ensure name of the service exists self.assertIn('name', r.result['token']['catalog'][0]) @@ -495,43 +578,54 @@ class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase): def test_scoped_token_with_no_catalog_using_endpoint_filter(self): """Verify endpoint filter does not affect no catalog.""" - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.project['id'], + 'endpoint_id': self.endpoint_id, + } + ) auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.post('/auth/tokens?nocatalog', body=auth_data) - self.assertValidProjectScopedTokenResponse( - r, - require_catalog=False) - self.assertEqual(self.project['id'], - r.result['token']['project']['id']) + self.assertValidProjectScopedTokenResponse(r, require_catalog=False) + self.assertEqual( + 
self.project['id'], r.result['token']['project']['id'] + ) def test_invalid_endpoint_project_association(self): """Verify an invalid endpoint-project association is handled.""" # add first endpoint to default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.project['id'], + 'endpoint_id': self.endpoint_id, + } + ) # create a second temporary endpoint endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='public', - id=endpoint_id2) + endpoint2 = unit.new_endpoint_ref( + service_id=self.service_id, + region_id=self.region_id, + interface='public', + id=endpoint_id2, + ) PROVIDERS.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy()) # add second endpoint to default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': endpoint_id2}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % {'project_id': self.project['id'], 'endpoint_id': endpoint_id2} + ) # remove the temporary reference # this will create inconsistency in the endpoint filter table @@ -541,48 +635,59 @@ class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.post('/auth/tokens', body=auth_data) self.assertValidProjectScopedTokenResponse( - r, - require_catalog=True, - endpoint_filter=True, - ep_filter_assoc=1) - self.assertEqual(self.project['id'], - r.result['token']['project']['id']) + r, require_catalog=True, endpoint_filter=True, ep_filter_assoc=1 + ) + self.assertEqual( + 
self.project['id'], r.result['token']['project']['id'] + ) def test_disabled_endpoint(self): """Test that a disabled endpoint is handled.""" # Add an enabled endpoint to the default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.project['id'], + 'endpoint_id': self.endpoint_id, + } + ) # Add a disabled endpoint to the default project. # Create a disabled endpoint that's like the enabled one. disabled_endpoint_ref = copy.copy(self.endpoint) disabled_endpoint_id = uuid.uuid4().hex - disabled_endpoint_ref.update({ - 'id': disabled_endpoint_id, - 'enabled': False, - 'interface': 'internal' - }) + disabled_endpoint_ref.update( + { + 'id': disabled_endpoint_id, + 'enabled': False, + 'interface': 'internal', + } + ) PROVIDERS.catalog_api.create_endpoint( disabled_endpoint_id, disabled_endpoint_ref ) - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': disabled_endpoint_id}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.project['id'], + 'endpoint_id': disabled_endpoint_id, + } + ) # Authenticate to get token with catalog auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.post('/auth/tokens', body=auth_data) endpoints = r.result['token']['catalog'][0]['endpoints'] @@ -592,9 +697,11 @@ class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase): def test_multiple_endpoint_project_associations(self): def _create_an_endpoint(): - endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) + endpoint_ref = 
unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + ) r = self.post('/endpoints', body={'endpoint': endpoint_ref}) return r.result['endpoint']['id'] @@ -604,49 +711,59 @@ class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase): _create_an_endpoint() # only associate two endpoints with project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': endpoint_id1}) - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': endpoint_id2}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % {'project_id': self.project['id'], 'endpoint_id': endpoint_id1} + ) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % {'project_id': self.project['id'], 'endpoint_id': endpoint_id2} + ) # there should be only two endpoints in token catalog auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.post('/auth/tokens', body=auth_data) self.assertValidProjectScopedTokenResponse( - r, - require_catalog=True, - endpoint_filter=True, - ep_filter_assoc=2) + r, require_catalog=True, endpoint_filter=True, ep_filter_assoc=2 + ) def test_get_auth_catalog_using_endpoint_filter(self): # add one endpoint to default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % { + 'project_id': self.project['id'], + 'endpoint_id': self.endpoint_id, + } + ) auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) 
+ project_id=self.project['id'], + ) token_data = self.post('/auth/tokens', body=auth_data) self.assertValidProjectScopedTokenResponse( token_data, require_catalog=True, endpoint_filter=True, - ep_filter_assoc=1) + ep_filter_assoc=1, + ) - auth_catalog = self.get('/auth/catalog', - token=token_data.headers['X-Subject-Token']) - self.assertEqual(token_data.result['token']['catalog'], - auth_catalog.result['catalog']) + auth_catalog = self.get( + '/auth/catalog', token=token_data.headers['X-Subject-Token'] + ) + self.assertEqual( + token_data.result['token']['catalog'], + auth_catalog.result['catalog'], + ) class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin): @@ -655,8 +772,7 @@ class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin): '1.0/rel/endpoint_projects': { 'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects', 'href-vars': { - 'endpoint_id': - 'https://docs.openstack.org/api/openstack-identity/3/param/' + 'endpoint_id': 'https://docs.openstack.org/api/openstack-identity/3/param/' 'endpoint_id', }, }, @@ -669,8 +785,7 @@ class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin): 'href-template': '/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}', 'href-vars': { - 'endpoint_group_id': - 'https://docs.openstack.org/api/openstack-identity/3/' + 'endpoint_group_id': 'https://docs.openstack.org/api/openstack-identity/3/' 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id', }, }, @@ -679,11 +794,9 @@ class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin): 'href-template': '/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}/projects/{project_id}', 'href-vars': { - 'project_id': - 'https://docs.openstack.org/api/openstack-identity/3/param/' + 'project_id': 'https://docs.openstack.org/api/openstack-identity/3/param/' 'project_id', - 'endpoint_group_id': - 'https://docs.openstack.org/api/openstack-identity/3/' + 'endpoint_group_id': 'https://docs.openstack.org/api/openstack-identity/3/' 
'ext/OS-EP-FILTER/1.0/param/endpoint_group_id', }, }, @@ -692,8 +805,7 @@ class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin): 'href-template': '/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}/projects', 'href-vars': { - 'endpoint_group_id': - 'https://docs.openstack.org/api/openstack-identity/3/' + 'endpoint_group_id': 'https://docs.openstack.org/api/openstack-identity/3/' 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id', }, }, @@ -702,8 +814,7 @@ class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin): 'href-template': '/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}/endpoints', 'href-vars': { - 'endpoint_group_id': - 'https://docs.openstack.org/api/openstack-identity/3/' + 'endpoint_group_id': 'https://docs.openstack.org/api/openstack-identity/3/' 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id', }, }, @@ -712,8 +823,7 @@ class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin): 'href-template': '/OS-EP-FILTER/projects/{project_id}/' 'endpoint_groups', 'href-vars': { - 'project_id': - 'https://docs.openstack.org/api/openstack-identity/3/param/' + 'project_id': 'https://docs.openstack.org/api/openstack-identity/3/param/' 'project_id', }, }, @@ -725,10 +835,8 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): DEFAULT_ENDPOINT_GROUP_BODY = { 'endpoint_group': { 'description': 'endpoint group description', - 'filters': { - 'interface': 'admin' - }, - 'name': 'endpoint_group_name' + 'filters': {'interface': 'admin'}, + 'name': 'endpoint_group_name', } } @@ -740,20 +848,27 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): Valid endpoint group test case. 
""" - r = self.post(self.DEFAULT_ENDPOINT_GROUP_URL, - body=self.DEFAULT_ENDPOINT_GROUP_BODY) - expected_filters = (self.DEFAULT_ENDPOINT_GROUP_BODY - ['endpoint_group']['filters']) - expected_name = (self.DEFAULT_ENDPOINT_GROUP_BODY - ['endpoint_group']['name']) - self.assertEqual(expected_filters, - r.result['endpoint_group']['filters']) + r = self.post( + self.DEFAULT_ENDPOINT_GROUP_URL, + body=self.DEFAULT_ENDPOINT_GROUP_BODY, + ) + expected_filters = self.DEFAULT_ENDPOINT_GROUP_BODY['endpoint_group'][ + 'filters' + ] + expected_name = self.DEFAULT_ENDPOINT_GROUP_BODY['endpoint_group'][ + 'name' + ] + self.assertEqual( + expected_filters, r.result['endpoint_group']['filters'] + ) self.assertEqual(expected_name, r.result['endpoint_group']['name']) self.assertThat( r.result['endpoint_group']['links']['self'], matchers.EndsWith( - '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': r.result['endpoint_group']['id']})) + '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' + % {'endpoint_group_id': r.result['endpoint_group']['id']} + ), + ) def test_create_invalid_endpoint_group(self): """POST /OS-EP-FILTER/endpoint_groups. @@ -763,9 +878,11 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ invalid_body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY) invalid_body['endpoint_group']['filters'] = {'foobar': 'admin'} - self.post(self.DEFAULT_ENDPOINT_GROUP_URL, - body=invalid_body, - expected_status=http.client.BAD_REQUEST) + self.post( + self.DEFAULT_ENDPOINT_GROUP_URL, + body=invalid_body, + expected_status=http.client.BAD_REQUEST, + ) def test_get_endpoint_group(self): """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}. 
@@ -774,22 +891,31 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ # create an endpoint group to work with - response = self.post(self.DEFAULT_ENDPOINT_GROUP_URL, - body=self.DEFAULT_ENDPOINT_GROUP_BODY) + response = self.post( + self.DEFAULT_ENDPOINT_GROUP_URL, + body=self.DEFAULT_ENDPOINT_GROUP_BODY, + ) endpoint_group_id = response.result['endpoint_group']['id'] endpoint_group_filters = response.result['endpoint_group']['filters'] endpoint_group_name = response.result['endpoint_group']['name'] url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } self.get(url) - self.assertEqual(endpoint_group_id, - response.result['endpoint_group']['id']) - self.assertEqual(endpoint_group_filters, - response.result['endpoint_group']['filters']) - self.assertEqual(endpoint_group_name, - response.result['endpoint_group']['name']) - self.assertThat(response.result['endpoint_group']['links']['self'], - matchers.EndsWith(url)) + self.assertEqual( + endpoint_group_id, response.result['endpoint_group']['id'] + ) + self.assertEqual( + endpoint_group_filters, + response.result['endpoint_group']['filters'], + ) + self.assertEqual( + endpoint_group_name, response.result['endpoint_group']['name'] + ) + self.assertThat( + response.result['endpoint_group']['links']['self'], + matchers.EndsWith(url), + ) def test_get_invalid_endpoint_group(self): """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}. 
@@ -799,7 +925,8 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ endpoint_group_id = 'foobar' url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } self.get(url, expected_status=http.client.NOT_FOUND) def test_check_endpoint_group(self): @@ -810,9 +937,11 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } self.head(url, expected_status=http.client.OK) def test_check_invalid_endpoint_group(self): @@ -823,7 +952,8 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ endpoint_group_id = 'foobar' url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } self.head(url, expected_status=http.client.NOT_FOUND) def test_patch_endpoint_group(self): @@ -837,16 +967,20 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): body['endpoint_group']['name'] = 'patch_test' # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } r = self.patch(url, body=body) - self.assertEqual(endpoint_group_id, - r.result['endpoint_group']['id']) - self.assertEqual(body['endpoint_group']['filters'], - r.result['endpoint_group']['filters']) - 
self.assertThat(r.result['endpoint_group']['links']['self'], - matchers.EndsWith(url)) + self.assertEqual(endpoint_group_id, r.result['endpoint_group']['id']) + self.assertEqual( + body['endpoint_group']['filters'], + r.result['endpoint_group']['filters'], + ) + self.assertThat( + r.result['endpoint_group']['links']['self'], matchers.EndsWith(url) + ) def test_patch_nonexistent_endpoint_group(self): """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group}. @@ -854,13 +988,10 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): Invalid endpoint group patch test case. """ - body = { - 'endpoint_group': { - 'name': 'patch_test' - } - } + body = {'endpoint_group': {'name': 'patch_test'}} url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': 'ABC'} + 'endpoint_group_id': 'ABC' + } self.patch(url, body=body, expected_status=http.client.NOT_FOUND) def test_patch_invalid_endpoint_group(self): @@ -872,24 +1003,25 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): body = { 'endpoint_group': { 'description': 'endpoint group description', - 'filters': { - 'region': 'UK' - }, - 'name': 'patch_test' + 'filters': {'region': 'UK'}, + 'name': 'patch_test', } } # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } self.patch(url, body=body, expected_status=http.client.BAD_REQUEST) # Perform a GET call to ensure that the content remains # the same (as DEFAULT_ENDPOINT_GROUP_BODY) after attempting to update # with an invalid filter url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } r = self.get(url) del 
r.result['endpoint_group']['id'] del r.result['endpoint_group']['links'] @@ -903,9 +1035,11 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } self.delete(url) self.get(url, expected_status=http.client.NOT_FOUND) @@ -917,87 +1051,102 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ endpoint_group_id = 'foobar' url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } self.delete(url, expected_status=http.client.NOT_FOUND) def test_add_endpoint_group_to_project(self): """Create a valid endpoint group and project association.""" endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - self._create_endpoint_group_project_association(endpoint_group_id, - self.project_id) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) + self._create_endpoint_group_project_association( + endpoint_group_id, self.project_id + ) def test_add_endpoint_group_to_project_with_invalid_project_id(self): """Create an invalid endpoint group and project association.""" # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # associate endpoint group with project project_id = uuid.uuid4().hex url = self._get_project_endpoint_group_url( - endpoint_group_id, project_id) + endpoint_group_id, project_id + ) self.put(url, 
expected_status=http.client.NOT_FOUND) def test_get_endpoint_group_in_project(self): """Test retrieving project endpoint group association.""" # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # associate endpoint group with project url = self._get_project_endpoint_group_url( - endpoint_group_id, self.project_id) + endpoint_group_id, self.project_id + ) self.put(url) response = self.get(url) self.assertEqual( endpoint_group_id, - response.result['project_endpoint_group']['endpoint_group_id']) + response.result['project_endpoint_group']['endpoint_group_id'], + ) self.assertEqual( self.project_id, - response.result['project_endpoint_group']['project_id']) + response.result['project_endpoint_group']['project_id'], + ) def test_get_invalid_endpoint_group_in_project(self): """Test retrieving project endpoint group association.""" endpoint_group_id = uuid.uuid4().hex project_id = uuid.uuid4().hex url = self._get_project_endpoint_group_url( - endpoint_group_id, project_id) + endpoint_group_id, project_id + ) self.get(url, expected_status=http.client.NOT_FOUND) def test_list_endpoint_groups_in_project(self): """GET & HEAD /OS-EP-FILTER/projects/{project_id}/endpoint_groups.""" # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # associate endpoint group with project url = self._get_project_endpoint_group_url( - endpoint_group_id, self.project_id) + endpoint_group_id, self.project_id + ) self.put(url) - url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % - {'project_id': self.project_id}) + url = '/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % { + 'project_id': self.project_id + } 
response = self.get(url, expected_status=http.client.OK) self.assertEqual( - endpoint_group_id, - response.result['endpoint_groups'][0]['id']) + endpoint_group_id, response.result['endpoint_groups'][0]['id'] + ) self.head(url, expected_status=http.client.OK) def test_list_endpoint_groups_in_invalid_project(self): """Test retrieving from invalid project.""" project_id = uuid.uuid4().hex - url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % - {'project_id': project_id}) + url = '/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % { + 'project_id': project_id + } self.get(url, expected_status=http.client.NOT_FOUND) self.head(url, expected_status=http.client.NOT_FOUND) def test_empty_endpoint_groups_in_project(self): """Test when no endpoint groups associated with the project.""" - url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % - {'project_id': self.project_id}) + url = '/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % { + 'project_id': self.project_id + } response = self.get(url, expected_status=http.client.OK) self.assertEqual(0, len(response.result['endpoint_groups'])) @@ -1007,42 +1156,50 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): def test_check_endpoint_group_to_project(self): """Test HEAD with a valid endpoint group and project association.""" endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - self._create_endpoint_group_project_association(endpoint_group_id, - self.project_id) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) + self._create_endpoint_group_project_association( + endpoint_group_id, self.project_id + ) url = self._get_project_endpoint_group_url( - endpoint_group_id, self.project_id) + endpoint_group_id, self.project_id + ) self.head(url, expected_status=http.client.OK) def test_check_endpoint_group_to_project_with_invalid_project_id(self): """Test HEAD with an invalid endpoint group and 
project association.""" # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # create an endpoint group to project association url = self._get_project_endpoint_group_url( - endpoint_group_id, self.project_id) + endpoint_group_id, self.project_id + ) self.put(url) # send a head request with an invalid project id project_id = uuid.uuid4().hex url = self._get_project_endpoint_group_url( - endpoint_group_id, project_id) + endpoint_group_id, project_id + ) self.head(url, expected_status=http.client.NOT_FOUND) def test_list_endpoint_groups(self): """GET & HEAD /OS-EP-FILTER/endpoint_groups.""" # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # recover all endpoint groups url = '/OS-EP-FILTER/endpoint_groups' r = self.get(url, expected_status=http.client.OK) self.assertNotEmpty(r.result['endpoint_groups']) - self.assertEqual(endpoint_group_id, - r.result['endpoint_groups'][0].get('id')) + self.assertEqual( + endpoint_group_id, r.result['endpoint_groups'][0].get('id') + ) self.head(url, expected_status=http.client.OK) @@ -1050,22 +1207,24 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """GET & HEAD /OS-EP-FILTER/endpoint_groups.""" # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # retrieve the single endpointgroup by name - url = ('/OS-EP-FILTER/endpoint_groups?name=%(name)s' % - {'name': 'endpoint_group_name'}) + url = '/OS-EP-FILTER/endpoint_groups?name=%(name)s' % { + 'name': 'endpoint_group_name' + } r = 
self.get(url, expected_status=http.client.OK) self.assertNotEmpty(r.result['endpoint_groups']) self.assertEqual(1, len(r.result['endpoint_groups'])) - self.assertEqual(endpoint_group_id, - r.result['endpoint_groups'][0].get('id')) + self.assertEqual( + endpoint_group_id, r.result['endpoint_groups'][0].get('id') + ) self.head(url, expected_status=http.client.OK) # try to retrieve a non existant one - url = ('/OS-EP-FILTER/endpoint_groups?name=%(name)s' % - {'name': 'fake'}) + url = '/OS-EP-FILTER/endpoint_groups?name=%(name)s' % {'name': 'fake'} r = self.get(url, expected_status=http.client.OK) self.assertEqual(0, len(r.result['endpoint_groups'])) @@ -1077,16 +1236,19 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ # create an endpoint group to work with endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # associate endpoint group with project - self._create_endpoint_group_project_association(endpoint_group_id, - self.project_id) + self._create_endpoint_group_project_association( + endpoint_group_id, self.project_id + ) # recover list of projects associated with endpoint group - url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/projects' % - {'endpoint_group_id': endpoint_group_id}) + url = ( + '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' + '/projects' % {'endpoint_group_id': endpoint_group_id} + ) self.get(url, expected_status=http.client.OK) self.head(url, expected_status=http.client.OK) @@ -1098,16 +1260,14 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): """ # create a service service_ref = unit.new_service_ref() - response = self.post( - '/services', - body={'service': service_ref}) + response = self.post('/services', body={'service': service_ref}) service_id = response.result['service']['id'] # create an endpoint - endpoint_ref = 
unit.new_endpoint_ref(service_id=service_id, - interface='public', - region_id=self.region_id) + endpoint_ref = unit.new_endpoint_ref( + service_id=service_id, interface='public', region_id=self.region_id + ) response = self.post('/endpoints', body={'endpoint': endpoint_ref}) endpoint_id = response.result['endpoint']['id'] @@ -1115,15 +1275,19 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY) body['endpoint_group']['filters'] = {'service_id': service_id} endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, body) + self.DEFAULT_ENDPOINT_GROUP_URL, body + ) # create association - self._create_endpoint_group_project_association(endpoint_group_id, - self.project_id) + self._create_endpoint_group_project_association( + endpoint_group_id, self.project_id + ) # recover list of endpoints associated with endpoint group - url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/endpoints' % {'endpoint_group_id': endpoint_group_id}) + url = ( + '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' + '/endpoints' % {'endpoint_group_id': endpoint_group_id} + ) r = self.get(url, expected_status=http.client.OK) self.assertNotEmpty(r.result['endpoints']) self.assertEqual(endpoint_id, r.result['endpoints'][0].get('id')) @@ -1142,9 +1306,9 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): # create additional endpoints self._create_endpoint_and_associations( - self.default_domain_project_id, service_id2) - self._create_endpoint_and_associations( - self.default_domain_project_id) + self.default_domain_project_id, service_id2 + ) + self._create_endpoint_and_associations(self.default_domain_project_id) # create project and endpoint association with default endpoint: self.put(self.default_request_url) @@ -1153,15 +1317,18 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY) body['endpoint_group']['filters'] = 
{'service_id': service_id2} endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, body) + self.DEFAULT_ENDPOINT_GROUP_URL, body + ) # associate endpoint group with project self._create_endpoint_group_project_association( - endpoint_group_id, self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) # Now get a list of the filtered endpoints endpoints_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { - 'project_id': self.default_domain_project_id} + 'project_id': self.default_domain_project_id + } r = self.get(endpoints_url, expected_status=http.client.OK) endpoints = self.assertValidEndpointListResponse(r) self.assertEqual(2, len(endpoints)) @@ -1172,18 +1339,20 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): # and "endpoint_filter.sql" backend driver is in place. user_id = uuid.uuid4().hex catalog_list = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertEqual(2, len(catalog_list)) # Now remove project endpoint group association url = self._get_project_endpoint_group_url( - endpoint_group_id, self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) self.delete(url) # Now remove endpoint group url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} + 'endpoint_group_id': endpoint_group_id + } self.delete(url) r = self.get(endpoints_url) @@ -1191,21 +1360,23 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): self.assertEqual(1, len(endpoints)) catalog_list = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertEqual(1, len(catalog_list)) def test_endpoint_group_project_cleanup_with_project(self): # create endpoint group endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, 
self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # create new project and associate with endpoint_group project_ref = unit.new_project_ref(domain_id=self.domain_id) r = self.post('/projects', body={'project': project_ref}) project = self.assertValidProjectResponse(r, project_ref) - url = self._get_project_endpoint_group_url(endpoint_group_id, - project['id']) + url = self._get_project_endpoint_group_url( + endpoint_group_id, project['id'] + ) self.put(url) # check that we can recover the project endpoint group association @@ -1214,22 +1385,23 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): # Now delete the project and then try and retrieve the project # endpoint group association again - self.delete('/projects/%(project_id)s' % { - 'project_id': project['id']}) + self.delete('/projects/%(project_id)s' % {'project_id': project['id']}) self.get(url, expected_status=http.client.NOT_FOUND) self.head(url, expected_status=http.client.NOT_FOUND) def test_endpoint_group_project_cleanup_with_endpoint_group(self): # create endpoint group endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # create new project and associate with endpoint_group project_ref = unit.new_project_ref(domain_id=self.domain_id) r = self.post('/projects', body={'project': project_ref}) project = self.assertValidProjectResponse(r, project_ref) - url = self._get_project_endpoint_group_url(endpoint_group_id, - project['id']) + url = self._get_project_endpoint_group_url( + endpoint_group_id, project['id'] + ) self.put(url) # check that we can recover the project endpoint group association @@ -1242,11 +1414,13 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): def test_removing_an_endpoint_group_project(self): # create an endpoint group endpoint_group_id = 
self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # create an endpoint_group project url = self._get_project_endpoint_group_url( - endpoint_group_id, self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) self.put(url) # remove the endpoint group project @@ -1256,22 +1430,26 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): def test_remove_endpoint_group_with_project_association(self): # create an endpoint group endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # create an endpoint_group project project_endpoint_group_url = self._get_project_endpoint_group_url( - endpoint_group_id, self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) self.put(project_endpoint_group_url) # remove endpoint group, the associated endpoint_group project will # be removed as well. - endpoint_group_url = ('/OS-EP-FILTER/endpoint_groups/' - '%(endpoint_group_id)s' - % {'endpoint_group_id': endpoint_group_id}) + endpoint_group_url = ( + '/OS-EP-FILTER/endpoint_groups/' + '%(endpoint_group_id)s' % {'endpoint_group_id': endpoint_group_id} + ) self.delete(endpoint_group_url) self.get(endpoint_group_url, expected_status=http.client.NOT_FOUND) - self.get(project_endpoint_group_url, - expected_status=http.client.NOT_FOUND) + self.get( + project_endpoint_group_url, expected_status=http.client.NOT_FOUND + ) @unit.skip_if_cache_disabled('catalog') def test_add_endpoint_group_to_project_invalidates_catalog_cache(self): @@ -1282,10 +1460,12 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): # this should be done at first since `create_endpoint` will also # invalidate cache. 
endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='admin', - id=endpoint_id2) + endpoint2 = unit.new_endpoint_ref( + service_id=self.service_id, + region_id=self.region_id, + interface='admin', + id=endpoint_id2, + ) PROVIDERS.catalog_api.create_endpoint(endpoint_id2, endpoint2) # create a project and endpoint association. @@ -1294,52 +1474,54 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): # there is only one endpoint associated with the default project. user_id = uuid.uuid4().hex catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1)) # create an endpoint group. endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # add the endpoint group to default project, bypassing # catalog_api API manager. PROVIDERS.catalog_api.driver.add_endpoint_group_to_project( - endpoint_group_id, - self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) # can get back only one endpoint from the cache, since the catalog # is pulled out from cache. invalid_catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) - self.assertThat(invalid_catalog[0]['endpoints'], - matchers.HasLength(1)) + self.assertThat(invalid_catalog[0]['endpoints'], matchers.HasLength(1)) self.assertEqual(catalog, invalid_catalog) # remove the endpoint group from default project, and add it again via # catalog_api API manager. 
PROVIDERS.catalog_api.driver.remove_endpoint_group_from_project( - endpoint_group_id, - self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) # add the endpoint group to default project. PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group_id, - self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) # now, it will return 2 endpoints since the cache has been # invalidated. self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2)) - ep_id_list = [catalog[0]['endpoints'][0]['id'], - catalog[0]['endpoints'][1]['id']] + ep_id_list = [ + catalog[0]['endpoints'][0]['id'], + catalog[0]['endpoints'][1]['id'], + ] self.assertCountEqual([self.endpoint_id, endpoint_id2], ep_id_list) @unit.skip_if_cache_disabled('catalog') @@ -1352,10 +1534,12 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): # this should be done at first since `create_endpoint` will also # invalidate cache. endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='admin', - id=endpoint_id2) + endpoint2 = unit.new_endpoint_ref( + service_id=self.service_id, + region_id=self.region_id, + interface='admin', + id=endpoint_id2, + ) PROVIDERS.catalog_api.create_endpoint(endpoint_id2, endpoint2) # create project and endpoint association. @@ -1363,59 +1547,61 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): # create an endpoint group. endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) + self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY + ) # add the endpoint group to default project. 
PROVIDERS.catalog_api.add_endpoint_group_to_project( - endpoint_group_id, - self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) # should get back two endpoints, one from endpoint project # association, the other one is from endpoint_group project # association. user_id = uuid.uuid4().hex catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2)) - ep_id_list = [catalog[0]['endpoints'][0]['id'], - catalog[0]['endpoints'][1]['id']] + ep_id_list = [ + catalog[0]['endpoints'][0]['id'], + catalog[0]['endpoints'][1]['id'], + ] self.assertCountEqual([self.endpoint_id, endpoint_id2], ep_id_list) # remove endpoint_group project association, bypassing # catalog_api API manager. PROVIDERS.catalog_api.driver.remove_endpoint_group_from_project( - endpoint_group_id, - self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) # still get back two endpoints, since the catalog is pulled out # from cache and the cache haven't been invalidated. invalid_catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) - self.assertThat(invalid_catalog[0]['endpoints'], - matchers.HasLength(2)) + self.assertThat(invalid_catalog[0]['endpoints'], matchers.HasLength(2)) self.assertEqual(catalog, invalid_catalog) # add back the endpoint_group project association and remove it from # manager. 
PROVIDERS.catalog_api.driver.add_endpoint_group_to_project( - endpoint_group_id, - self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) PROVIDERS.catalog_api.remove_endpoint_group_from_project( - endpoint_group_id, - self.default_domain_project_id) + endpoint_group_id, self.default_domain_project_id + ) # should only get back one endpoint since the cache has been # invalidated after the endpoint_group project association was # removed. catalog = PROVIDERS.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) + user_id, self.default_domain_project_id + ) self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1)) self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id']) @@ -1424,40 +1610,43 @@ class EndpointGroupCRUDTestCase(EndpointFilterTestCase): r = self.post(url, body=body) return r.result['endpoint_group']['id'] - def _create_endpoint_group_project_association(self, - endpoint_group_id, - project_id): - url = self._get_project_endpoint_group_url(endpoint_group_id, - project_id) + def _create_endpoint_group_project_association( + self, endpoint_group_id, project_id + ): + url = self._get_project_endpoint_group_url( + endpoint_group_id, project_id + ) self.put(url) - def _get_project_endpoint_group_url(self, - endpoint_group_id, - project_id): - return ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/projects/%(project_id)s' % - {'endpoint_group_id': endpoint_group_id, - 'project_id': project_id}) + def _get_project_endpoint_group_url(self, endpoint_group_id, project_id): + return ( + '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' + '/projects/%(project_id)s' + % { + 'endpoint_group_id': endpoint_group_id, + 'project_id': project_id, + } + ) def _create_endpoint_and_associations(self, project_id, service_id=None): """Create an endpoint associated with service and project.""" if not service_id: # create a new service service_ref = unit.new_service_ref() - response = self.post( - 
'/services', body={'service': service_ref}) + response = self.post('/services', body={'service': service_ref}) service_id = response.result['service']['id'] # create endpoint - endpoint_ref = unit.new_endpoint_ref(service_id=service_id, - interface='public', - region_id=self.region_id) + endpoint_ref = unit.new_endpoint_ref( + service_id=service_id, interface='public', region_id=self.region_id + ) response = self.post('/endpoints', body={'endpoint': endpoint_ref}) endpoint = response.result['endpoint'] # now add endpoint to project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': endpoint['id']}) + self.put( + '/OS-EP-FILTER/projects/%(project_id)s' + '/endpoints/%(endpoint_id)s' + % {'project_id': self.project['id'], 'endpoint_id': endpoint['id']} + ) return endpoint diff --git a/keystone/tests/unit/test_auth_plugin.py b/keystone/tests/unit/test_auth_plugin.py index e8c867a2c2..a672cee2b9 100644 --- a/keystone/tests/unit/test_auth_plugin.py +++ b/keystone/tests/unit/test_auth_plugin.py @@ -40,14 +40,17 @@ class SimpleChallengeResponse(base.AuthMethodHandler): raise exception.Unauthorized('Wrong answer') response_data['user_id'] = DEMO_USER_ID - return base.AuthHandlerResponse(status=True, response_body=None, - response_data=response_data) + return base.AuthHandlerResponse( + status=True, response_body=None, response_data=response_data + ) else: return base.AuthHandlerResponse( status=False, response_body={ - "challenge": "What's the name of your high school?"}, - response_data=None) + "challenge": "What's the name of your high school?" 
+ }, + response_data=None, + ) class TestAuthPlugin(unit.SQLDriverOverrides, unit.TestCase): @@ -57,28 +60,33 @@ class TestAuthPlugin(unit.SQLDriverOverrides, unit.TestCase): auth_data = {'methods': [method_name]} auth_data[method_name] = {'test': 'test'} auth_data = {'identity': auth_data} - self.assertRaises(exception.AuthMethodNotSupported, - auth.core.AuthInfo.create, - auth_data) + self.assertRaises( + exception.AuthMethodNotSupported, + auth.core.AuthInfo.create, + auth_data, + ) @mock.patch.object(auth.core, '_get_auth_driver_manager') def test_addition_auth_steps(self, stevedore_mock): simple_challenge_plugin = SimpleChallengeResponse() extension = stevedore.extension.Extension( - name='simple_challenge', entry_point=None, plugin=None, - obj=simple_challenge_plugin + name='simple_challenge', + entry_point=None, + plugin=None, + obj=simple_challenge_plugin, ) test_manager = stevedore.DriverManager.make_test_instance(extension) stevedore_mock.return_value = test_manager self.useFixture( - auth_plugins.ConfigAuthPlugins(self.config_fixture, - methods=[METHOD_NAME])) + auth_plugins.ConfigAuthPlugins( + self.config_fixture, methods=[METHOD_NAME] + ) + ) self.useFixture(auth_plugins.LoadAuthPlugins(METHOD_NAME)) auth_data = {'methods': [METHOD_NAME]} - auth_data[METHOD_NAME] = { - 'test': 'test'} + auth_data[METHOD_NAME] = {'test': 'test'} auth_data = {'identity': auth_data} auth_info = auth.core.AuthInfo.create(auth_data) auth_context = auth.core.AuthContext(method_names=[]) @@ -93,8 +101,7 @@ class TestAuthPlugin(unit.SQLDriverOverrides, unit.TestCase): # test correct response auth_data = {'methods': [METHOD_NAME]} - auth_data[METHOD_NAME] = { - 'response': EXPECTED_RESPONSE} + auth_data[METHOD_NAME] = {'response': EXPECTED_RESPONSE} auth_data = {'identity': auth_data} auth_info = auth.core.AuthInfo.create(auth_data) auth_context = auth.core.AuthContext(method_names=[]) @@ -104,22 +111,25 @@ class TestAuthPlugin(unit.SQLDriverOverrides, unit.TestCase): # test 
incorrect response auth_data = {'methods': [METHOD_NAME]} - auth_data[METHOD_NAME] = { - 'response': uuid.uuid4().hex} + auth_data[METHOD_NAME] = {'response': uuid.uuid4().hex} auth_data = {'identity': auth_data} auth_info = auth.core.AuthInfo.create(auth_data) auth_context = auth.core.AuthContext(method_names=[]) with self.make_request(): - self.assertRaises(exception.Unauthorized, - authentication.authenticate, - auth_info, - auth_context) + self.assertRaises( + exception.Unauthorized, + authentication.authenticate, + auth_info, + auth_context, + ) def test_duplicate_method(self): # Having the same method twice doesn't cause load_auth_methods to fail. self.useFixture( - auth_plugins.ConfigAuthPlugins(self.config_fixture, - ['external', 'external'])) + auth_plugins.ConfigAuthPlugins( + self.config_fixture, ['external', 'external'] + ) + ) auth.core.load_auth_methods() self.assertIn('external', auth.core.AUTH_METHODS) @@ -145,9 +155,9 @@ class TestMapped(unit.TestCase): return config_files def _test_mapped_invocation_with_method_name(self, method_name): - with mock.patch.object(auth.plugins.mapped.Mapped, - 'authenticate', - return_value=None) as authenticate: + with mock.patch.object( + auth.plugins.mapped.Mapped, 'authenticate', return_value=None + ) as authenticate: auth_data = { 'identity': { 'methods': [method_name], @@ -156,8 +166,8 @@ class TestMapped(unit.TestCase): } auth_info = auth.core.AuthInfo.create(auth_data) auth_context = auth.core.AuthContext( - method_names=[], - user_id=uuid.uuid4().hex) + method_names=[], user_id=uuid.uuid4().hex + ) with self.make_request(): authentication.authenticate(auth_info, auth_context) # make sure Mapped plugin got invoked with the correct payload @@ -173,14 +183,14 @@ class TestMapped(unit.TestCase): auth_data = {'identity': auth_data} auth_context = auth.core.AuthContext( - method_names=[], - user_id=uuid.uuid4().hex) + method_names=[], user_id=uuid.uuid4().hex + ) 
self.useFixture(auth_plugins.LoadAuthPlugins(method_name)) - with mock.patch.object(auth.plugins.mapped.Mapped, - 'authenticate', - return_value=None) as authenticate: + with mock.patch.object( + auth.plugins.mapped.Mapped, 'authenticate', return_value=None + ) as authenticate: auth_info = auth.core.AuthInfo.create(auth_data) with self.make_request(environ={'REMOTE_USER': 'foo@idp.com'}): authentication.authenticate(auth_info, auth_context) @@ -189,8 +199,9 @@ class TestMapped(unit.TestCase): self.assertEqual(method_name, auth_payload['protocol']) @mock.patch('keystone.auth.plugins.mapped.PROVIDERS') - def test_mapped_without_identity_provider_or_protocol(self, - mock_providers): + def test_mapped_without_identity_provider_or_protocol( + self, mock_providers + ): mock_providers.resource_api = mock.Mock() mock_providers.federation_api = mock.Mock() mock_providers.identity_api = mock.Mock() @@ -202,14 +213,18 @@ class TestMapped(unit.TestCase): auth_payload = {'identity_provider': 'test_provider'} with self.make_request(): self.assertRaises( - exception.ValidationError, test_mapped.authenticate, - auth_payload) + exception.ValidationError, + test_mapped.authenticate, + auth_payload, + ) auth_payload = {'protocol': 'saml2'} with self.make_request(): self.assertRaises( - exception.ValidationError, test_mapped.authenticate, - auth_payload) + exception.ValidationError, + test_mapped.authenticate, + auth_payload, + ) def test_supporting_multiple_methods(self): method_names = ('saml2', 'openid', 'x509', 'mapped') diff --git a/keystone/tests/unit/test_backend_endpoint_policy.py b/keystone/tests/unit/test_backend_endpoint_policy.py index 7533b67567..64c4cab489 100644 --- a/keystone/tests/unit/test_backend_endpoint_policy.py +++ b/keystone/tests/unit/test_backend_endpoint_policy.py @@ -26,19 +26,15 @@ PROVIDERS = provider_api.ProviderAPIs class PolicyAssociationTests(object): def _assert_correct_policy(self, endpoint, policy): - ref = ( - 
PROVIDERS.endpoint_policy_api.get_policy_for_endpoint( - endpoint['id'] - ) + ref = PROVIDERS.endpoint_policy_api.get_policy_for_endpoint( + endpoint['id'] ) self.assertEqual(policy['id'], ref['id']) def _assert_correct_endpoints(self, policy, endpoint_list): endpoint_id_list = [ep['id'] for ep in endpoint_list] - endpoints = ( - PROVIDERS.endpoint_policy_api.list_endpoints_for_policy( - policy['id'] - ) + endpoints = PROVIDERS.endpoint_policy_api.list_endpoints_for_policy( + policy['id'] ) self.assertThat(endpoints, matchers.HasLength(len(endpoint_list))) for endpoint in endpoints: @@ -60,13 +56,17 @@ class PolicyAssociationTests(object): 5 - region 2, Service 0 """ + def new_endpoint(region_id, service_id): - endpoint = unit.new_endpoint_ref(interface='test', - region_id=region_id, - service_id=service_id, - url='/url') - self.endpoint.append(PROVIDERS.catalog_api.create_endpoint( - endpoint['id'], endpoint)) + endpoint = unit.new_endpoint_ref( + interface='test', + region_id=region_id, + service_id=service_id, + url='/url', + ) + self.endpoint.append( + PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) + ) self.policy = [] self.endpoint = [] @@ -98,99 +98,116 @@ class PolicyAssociationTests(object): def test_policy_to_endpoint_association_crud(self): PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) + self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'] + ) PROVIDERS.endpoint_policy_api.check_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) + self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'] + ) PROVIDERS.endpoint_policy_api.delete_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) + self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'] + ) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.check_policy_association, self.policy[0]['id'], - endpoint_id=self.endpoint[0]['id'] + 
endpoint_id=self.endpoint[0]['id'], ) def test_overwriting_policy_to_endpoint_association(self): PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) + self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'] + ) PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[1]['id'], endpoint_id=self.endpoint[0]['id']) + self.policy[1]['id'], endpoint_id=self.endpoint[0]['id'] + ) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.check_policy_association, self.policy[0]['id'], - endpoint_id=self.endpoint[0]['id'] + endpoint_id=self.endpoint[0]['id'], ) PROVIDERS.endpoint_policy_api.check_policy_association( - self.policy[1]['id'], endpoint_id=self.endpoint[0]['id']) + self.policy[1]['id'], endpoint_id=self.endpoint[0]['id'] + ) def test_invalid_policy_to_endpoint_association(self): self.assertRaises( exception.InvalidPolicyAssociation, PROVIDERS.endpoint_policy_api.create_policy_association, - self.policy[0]['id'] + self.policy[0]['id'], ) self.assertRaises( exception.InvalidPolicyAssociation, PROVIDERS.endpoint_policy_api.create_policy_association, self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'], - region_id=self.region[0]['id'] + region_id=self.region[0]['id'], ) self.assertRaises( exception.InvalidPolicyAssociation, PROVIDERS.endpoint_policy_api.create_policy_association, self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'], - service_id=self.service[0]['id'] + service_id=self.service[0]['id'], ) self.assertRaises( exception.InvalidPolicyAssociation, PROVIDERS.endpoint_policy_api.create_policy_association, self.policy[0]['id'], - region_id=self.region[0]['id'] + region_id=self.region[0]['id'], ) def test_policy_to_explicit_endpoint_association(self): # Associate policy 0 with endpoint 0 PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) + self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'] 
+ ) self._assert_correct_policy(self.endpoint[0], self.policy[0]) self._assert_correct_endpoints(self.policy[0], [self.endpoint[0]]) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.get_policy_for_endpoint, - uuid.uuid4().hex + uuid.uuid4().hex, ) def test_policy_to_service_association(self): PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id']) + self.policy[0]['id'], service_id=self.service[0]['id'] + ) PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[1]['id'], service_id=self.service[1]['id']) + self.policy[1]['id'], service_id=self.service[1]['id'] + ) # Endpoints 0 and 5 are part of service 0 self._assert_correct_policy(self.endpoint[0], self.policy[0]) self._assert_correct_policy(self.endpoint[5], self.policy[0]) self._assert_correct_endpoints( - self.policy[0], [self.endpoint[0], self.endpoint[5]]) + self.policy[0], [self.endpoint[0], self.endpoint[5]] + ) # Endpoints 1 and 2 are part of service 1 self._assert_correct_policy(self.endpoint[1], self.policy[1]) self._assert_correct_policy(self.endpoint[2], self.policy[1]) self._assert_correct_endpoints( - self.policy[1], [self.endpoint[1], self.endpoint[2]]) + self.policy[1], [self.endpoint[1], self.endpoint[2]] + ) def test_policy_to_region_and_service_association(self): PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id'], - region_id=self.region[0]['id']) + self.policy[0]['id'], + service_id=self.service[0]['id'], + region_id=self.region[0]['id'], + ) PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[1]['id'], service_id=self.service[1]['id'], - region_id=self.region[1]['id']) + self.policy[1]['id'], + service_id=self.service[1]['id'], + region_id=self.region[1]['id'], + ) PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[2]['id'], service_id=self.service[2]['id'], - region_id=self.region[2]['id']) + 
self.policy[2]['id'], + service_id=self.service[2]['id'], + region_id=self.region[2]['id'], + ) # Endpoint 0 is in region 0 with service 0, so should get policy 0 self._assert_correct_policy(self.endpoint[0], self.policy[0]) @@ -201,86 +218,97 @@ class PolicyAssociationTests(object): # Looking the other way round, policy 2 should only be in use by # endpoint 4, since that's the only endpoint in region 2 with the # correct service - self._assert_correct_endpoints( - self.policy[2], [self.endpoint[4]]) + self._assert_correct_endpoints(self.policy[2], [self.endpoint[4]]) # Policy 1 should only be in use by endpoint 2, since that's the only # endpoint in region 1 (and region 2 below it) with the correct service - self._assert_correct_endpoints( - self.policy[1], [self.endpoint[2]]) + self._assert_correct_endpoints(self.policy[1], [self.endpoint[2]]) # Policy 0 should be in use by endpoint 0, as well as 5 (since 5 is # of the correct service and in region 2 below it) self._assert_correct_endpoints( - self.policy[0], [self.endpoint[0], self.endpoint[5]]) + self.policy[0], [self.endpoint[0], self.endpoint[5]] + ) def test_delete_association_by_entity(self): PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) + self.policy[0]['id'], endpoint_id=self.endpoint[0]['id'] + ) PROVIDERS.endpoint_policy_api.delete_association_by_endpoint( - self.endpoint[0]['id']) + self.endpoint[0]['id'] + ) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.check_policy_association, self.policy[0]['id'], - endpoint_id=self.endpoint[0]['id'] + endpoint_id=self.endpoint[0]['id'], ) # Make sure deleting it again is silent - since this method is used # in response to notifications by the controller. 
PROVIDERS.endpoint_policy_api.delete_association_by_endpoint( - self.endpoint[0]['id']) + self.endpoint[0]['id'] + ) # Now try with service - ensure both combined region & service # associations and explicit service ones are removed PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id'], - region_id=self.region[0]['id']) + self.policy[0]['id'], + service_id=self.service[0]['id'], + region_id=self.region[0]['id'], + ) PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[1]['id'], service_id=self.service[0]['id'], - region_id=self.region[1]['id']) + self.policy[1]['id'], + service_id=self.service[0]['id'], + region_id=self.region[1]['id'], + ) PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id']) + self.policy[0]['id'], service_id=self.service[0]['id'] + ) PROVIDERS.endpoint_policy_api.delete_association_by_service( - self.service[0]['id']) + self.service[0]['id'] + ) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.check_policy_association, self.policy[0]['id'], service_id=self.service[0]['id'], - region_id=self.region[0]['id'] + region_id=self.region[0]['id'], ) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.check_policy_association, self.policy[1]['id'], service_id=self.service[0]['id'], - region_id=self.region[1]['id'] + region_id=self.region[1]['id'], ) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.check_policy_association, self.policy[0]['id'], - service_id=self.service[0]['id'] + service_id=self.service[0]['id'], ) # Finally, check delete by region PROVIDERS.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id'], - region_id=self.region[0]['id']) + self.policy[0]['id'], + service_id=self.service[0]['id'], + region_id=self.region[0]['id'], + ) PROVIDERS.endpoint_policy_api.delete_association_by_region( - 
self.region[0]['id']) + self.region[0]['id'] + ) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.check_policy_association, self.policy[0]['id'], service_id=self.service[0]['id'], - region_id=self.region[0]['id'] + region_id=self.region[0]['id'], ) self.assertRaises( exception.NotFound, PROVIDERS.endpoint_policy_api.check_policy_association, self.policy[0]['id'], - service_id=self.service[0]['id'] + service_id=self.service[0]['id'], ) diff --git a/keystone/tests/unit/test_backend_endpoint_policy_sql.py b/keystone/tests/unit/test_backend_endpoint_policy_sql.py index 134a03f02d..c86ce06ff5 100644 --- a/keystone/tests/unit/test_backend_endpoint_policy_sql.py +++ b/keystone/tests/unit/test_backend_endpoint_policy_sql.py @@ -21,17 +21,20 @@ class SqlPolicyAssociationTable(test_backend_sql.SqlModels): """Set of tests for checking SQL Policy Association Mapping.""" def test_policy_association_mapping(self): - cols = (('id', sql.String, 64), - ('policy_id', sql.String, 64), - ('endpoint_id', sql.String, 64), - ('service_id', sql.String, 64), - ('region_id', sql.String, 64)) + cols = ( + ('id', sql.String, 64), + ('policy_id', sql.String, 64), + ('endpoint_id', sql.String, 64), + ('service_id', sql.String, 64), + ('region_id', sql.String, 64), + ) self.assertExpectedSchema('policy_association', cols) class SqlPolicyAssociationTests( test_backend_sql.SqlTests, - test_backend_endpoint_policy.PolicyAssociationTests): + test_backend_endpoint_policy.PolicyAssociationTests, +): def load_fixtures(self, fixtures): super(SqlPolicyAssociationTests, self).load_fixtures(fixtures) diff --git a/keystone/tests/unit/test_backend_federation_sql.py b/keystone/tests/unit/test_backend_federation_sql.py index 9700ba1ce5..26dbaab4a8 100644 --- a/keystone/tests/unit/test_backend_federation_sql.py +++ b/keystone/tests/unit/test_backend_federation_sql.py @@ -20,36 +20,43 @@ class SqlFederation(test_backend_sql.SqlModels): """Set of tests for checking SQL Federation.""" def 
test_identity_provider(self): - cols = (('id', sql.String, 64), - ('domain_id', sql.String, 64), - ('enabled', sql.Boolean, None), - ('description', sql.Text, None), - ('authorization_ttl', sql.Integer, None)) + cols = ( + ('id', sql.String, 64), + ('domain_id', sql.String, 64), + ('enabled', sql.Boolean, None), + ('description', sql.Text, None), + ('authorization_ttl', sql.Integer, None), + ) self.assertExpectedSchema('identity_provider', cols) def test_idp_remote_ids(self): - cols = (('idp_id', sql.String, 64), - ('remote_id', sql.String, 255)) + cols = (('idp_id', sql.String, 64), ('remote_id', sql.String, 255)) self.assertExpectedSchema('idp_remote_ids', cols) def test_federated_protocol(self): - cols = (('id', sql.String, 64), - ('idp_id', sql.String, 64), - ('mapping_id', sql.String, 64), - ('remote_id_attribute', sql.String, 64)) + cols = ( + ('id', sql.String, 64), + ('idp_id', sql.String, 64), + ('mapping_id', sql.String, 64), + ('remote_id_attribute', sql.String, 64), + ) self.assertExpectedSchema('federation_protocol', cols) def test_mapping(self): - cols = (('id', sql.String, 64), - ('rules', sql.JsonBlob, None), - ('schema_version', sql.String, 5)) + cols = ( + ('id', sql.String, 64), + ('rules', sql.JsonBlob, None), + ('schema_version', sql.String, 5), + ) self.assertExpectedSchema('mapping', cols) def test_service_provider(self): - cols = (('auth_url', sql.String, 256), - ('id', sql.String, 64), - ('enabled', sql.Boolean, None), - ('description', sql.Text, None), - ('relay_state_prefix', sql.String, 256), - ('sp_url', sql.String, 256)) + cols = ( + ('auth_url', sql.String, 256), + ('id', sql.String, 64), + ('enabled', sql.Boolean, None), + ('description', sql.Text, None), + ('relay_state_prefix', sql.String, 256), + ('sp_url', sql.String, 256), + ) self.assertExpectedSchema('service_provider', cols) diff --git a/keystone/tests/unit/test_backend_id_mapping_sql.py b/keystone/tests/unit/test_backend_id_mapping_sql.py index 7df4ad3d4c..6b87068c52 100644 
--- a/keystone/tests/unit/test_backend_id_mapping_sql.py +++ b/keystone/tests/unit/test_backend_id_mapping_sql.py @@ -31,10 +31,12 @@ class SqlIDMappingTable(test_backend_sql.SqlModels): """Set of tests for checking SQL Identity ID Mapping.""" def test_id_mapping(self): - cols = (('public_id', sql.String, 64), - ('domain_id', sql.String, 64), - ('local_id', sql.String, 255), - ('entity_type', sql.Enum, None)) + cols = ( + ('public_id', sql.String, 64), + ('domain_id', sql.String, 64), + ('local_id', sql.String, 255), + ('entity_type', sql.Enum, None), + ) self.assertExpectedSchema('id_mapping', cols) @@ -78,12 +80,16 @@ class SqlIDMapping(test_backend_sql.SqlTests): initial_mappings = len(mapping_sql.list_id_mappings()) local_id1 = uuid.uuid4().hex local_id2 = uuid.uuid4().hex - local_entity1 = {'domain_id': self.domainA['id'], - 'local_id': local_id1, - 'entity_type': mapping.EntityType.USER} - local_entity2 = {'domain_id': self.domainB['id'], - 'local_id': local_id2, - 'entity_type': mapping.EntityType.GROUP} + local_entity1 = { + 'domain_id': self.domainA['id'], + 'local_id': local_id1, + 'entity_type': mapping.EntityType.USER, + } + local_entity2 = { + 'domain_id': self.domainB['id'], + 'local_id': local_id2, + 'entity_type': mapping.EntityType.GROUP, + } # Check no mappings for the new local entities self.assertIsNone( @@ -96,12 +102,16 @@ class SqlIDMapping(test_backend_sql.SqlTests): # Create the new mappings and then read them back public_id1 = PROVIDERS.id_mapping_api.create_id_mapping(local_entity1) public_id2 = PROVIDERS.id_mapping_api.create_id_mapping(local_entity2) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 2)) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 2), + ) self.assertEqual( - public_id1, PROVIDERS.id_mapping_api.get_public_id(local_entity1)) + public_id1, PROVIDERS.id_mapping_api.get_public_id(local_entity1) + ) self.assertEqual( - public_id2, 
PROVIDERS.id_mapping_api.get_public_id(local_entity2)) + public_id2, PROVIDERS.id_mapping_api.get_public_id(local_entity2) + ) local_id_ref = PROVIDERS.id_mapping_api.get_id_mapping(public_id1) self.assertEqual(self.domainA['id'], local_id_ref['domain_id']) @@ -120,74 +130,98 @@ class SqlIDMapping(test_backend_sql.SqlTests): # Create another mappings, this time specifying a public ID to use new_public_id = uuid.uuid4().hex public_id3 = PROVIDERS.id_mapping_api.create_id_mapping( - {'domain_id': self.domainB['id'], 'local_id': local_id2, - 'entity_type': mapping.EntityType.USER}, - public_id=new_public_id) + { + 'domain_id': self.domainB['id'], + 'local_id': local_id2, + 'entity_type': mapping.EntityType.USER, + }, + public_id=new_public_id, + ) self.assertEqual(new_public_id, public_id3) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 3)) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 3), + ) # Delete the mappings we created, and make sure the mapping count # goes back to where it was PROVIDERS.id_mapping_api.delete_id_mapping(public_id1) PROVIDERS.id_mapping_api.delete_id_mapping(public_id2) PROVIDERS.id_mapping_api.delete_id_mapping(public_id3) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings)) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings), + ) def test_id_mapping_handles_unicode(self): initial_mappings = len(mapping_sql.list_id_mappings()) local_id = u'fäké1' - local_entity = {'domain_id': self.domainA['id'], - 'local_id': local_id, - 'entity_type': mapping.EntityType.USER} + local_entity = { + 'domain_id': self.domainA['id'], + 'local_id': local_id, + 'entity_type': mapping.EntityType.USER, + } # Check no mappings for the new local entity self.assertIsNone(PROVIDERS.id_mapping_api.get_public_id(local_entity)) # Create the new mapping and then read it back public_id = 
PROVIDERS.id_mapping_api.create_id_mapping(local_entity) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 1)) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 1), + ) self.assertEqual( - public_id, PROVIDERS.id_mapping_api.get_public_id(local_entity)) + public_id, PROVIDERS.id_mapping_api.get_public_id(local_entity) + ) def test_id_mapping_handles_bytes(self): initial_mappings = len(mapping_sql.list_id_mappings()) local_id = b'FaKeID' - local_entity = {'domain_id': self.domainA['id'], - 'local_id': local_id, - 'entity_type': mapping.EntityType.USER} + local_entity = { + 'domain_id': self.domainA['id'], + 'local_id': local_id, + 'entity_type': mapping.EntityType.USER, + } # Check no mappings for the new local entity self.assertIsNone(PROVIDERS.id_mapping_api.get_public_id(local_entity)) # Create the new mapping and then read it back public_id = PROVIDERS.id_mapping_api.create_id_mapping(local_entity) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 1)) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 1), + ) self.assertEqual( - public_id, PROVIDERS.id_mapping_api.get_public_id(local_entity)) + public_id, PROVIDERS.id_mapping_api.get_public_id(local_entity) + ) def test_id_mapping_handles_ids_greater_than_64_characters(self): initial_mappings = len(mapping_sql.list_id_mappings()) local_id = 'Aa' * 100 - local_entity = {'domain_id': self.domainA['id'], - 'local_id': local_id, - 'entity_type': mapping.EntityType.GROUP} + local_entity = { + 'domain_id': self.domainA['id'], + 'local_id': local_id, + 'entity_type': mapping.EntityType.GROUP, + } # Check no mappings for the new local entity self.assertIsNone(PROVIDERS.id_mapping_api.get_public_id(local_entity)) # Create the new mapping and then read it back public_id = PROVIDERS.id_mapping_api.create_id_mapping(local_entity) - 
self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 1)) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 1), + ) self.assertEqual( - public_id, PROVIDERS.id_mapping_api.get_public_id(local_entity)) + public_id, PROVIDERS.id_mapping_api.get_public_id(local_entity) + ) self.assertEqual( local_id, - PROVIDERS.id_mapping_api.get_id_mapping(public_id)['local_id']) + PROVIDERS.id_mapping_api.get_id_mapping(public_id)['local_id'], + ) def test_delete_public_id_is_silent(self): # Test that deleting an invalid public key is silent @@ -203,59 +237,96 @@ class SqlIDMapping(test_backend_sql.SqlTests): # Create five mappings,two in domainA, three in domainB PROVIDERS.id_mapping_api.create_id_mapping( - {'domain_id': self.domainA['id'], 'local_id': local_id1, - 'entity_type': mapping.EntityType.USER}) + { + 'domain_id': self.domainA['id'], + 'local_id': local_id1, + 'entity_type': mapping.EntityType.USER, + } + ) PROVIDERS.id_mapping_api.create_id_mapping( - {'domain_id': self.domainA['id'], 'local_id': local_id2, - 'entity_type': mapping.EntityType.USER}) + { + 'domain_id': self.domainA['id'], + 'local_id': local_id2, + 'entity_type': mapping.EntityType.USER, + } + ) public_id3 = PROVIDERS.id_mapping_api.create_id_mapping( - {'domain_id': self.domainB['id'], 'local_id': local_id3, - 'entity_type': mapping.EntityType.GROUP}) + { + 'domain_id': self.domainB['id'], + 'local_id': local_id3, + 'entity_type': mapping.EntityType.GROUP, + } + ) public_id4 = PROVIDERS.id_mapping_api.create_id_mapping( - {'domain_id': self.domainB['id'], 'local_id': local_id4, - 'entity_type': mapping.EntityType.USER}) + { + 'domain_id': self.domainB['id'], + 'local_id': local_id4, + 'entity_type': mapping.EntityType.USER, + } + ) public_id5 = PROVIDERS.id_mapping_api.create_id_mapping( - {'domain_id': self.domainB['id'], 'local_id': local_id5, - 'entity_type': mapping.EntityType.USER}) + { + 'domain_id': self.domainB['id'], 
+ 'local_id': local_id5, + 'entity_type': mapping.EntityType.USER, + } + ) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 5)) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 5), + ) # Purge mappings for domainA, should be left with those in B PROVIDERS.id_mapping_api.purge_mappings( - {'domain_id': self.domainA['id']}) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 3)) + {'domain_id': self.domainA['id']} + ) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 3), + ) PROVIDERS.id_mapping_api.get_id_mapping(public_id3) PROVIDERS.id_mapping_api.get_id_mapping(public_id4) PROVIDERS.id_mapping_api.get_id_mapping(public_id5) # Purge mappings for type Group, should purge one more PROVIDERS.id_mapping_api.purge_mappings( - {'entity_type': mapping.EntityType.GROUP}) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 2)) + {'entity_type': mapping.EntityType.GROUP} + ) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 2), + ) PROVIDERS.id_mapping_api.get_id_mapping(public_id4) PROVIDERS.id_mapping_api.get_id_mapping(public_id5) # Purge mapping for a specific local identifier PROVIDERS.id_mapping_api.purge_mappings( - {'domain_id': self.domainB['id'], 'local_id': local_id4, - 'entity_type': mapping.EntityType.USER}) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 1)) + { + 'domain_id': self.domainB['id'], + 'local_id': local_id4, + 'entity_type': mapping.EntityType.USER, + } + ) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings + 1), + ) PROVIDERS.id_mapping_api.get_id_mapping(public_id5) # Purge mappings the remaining mappings PROVIDERS.id_mapping_api.purge_mappings({}) - self.assertThat(mapping_sql.list_id_mappings(), - 
matchers.HasLength(initial_mappings)) + self.assertThat( + mapping_sql.list_id_mappings(), + matchers.HasLength(initial_mappings), + ) def test_create_duplicate_mapping(self): local_entity = { 'domain_id': self.domainA['id'], 'local_id': uuid.uuid4().hex, - 'entity_type': mapping.EntityType.USER} + 'entity_type': mapping.EntityType.USER, + } public_id1 = PROVIDERS.id_mapping_api.create_id_mapping(local_entity) # second call should be successful and return the same @@ -266,15 +337,18 @@ class SqlIDMapping(test_backend_sql.SqlTests): # even if public_id was specified, it should not be used, # and still the same public_id should be returned public_id3 = PROVIDERS.id_mapping_api.create_id_mapping( - local_entity, public_id=uuid.uuid4().hex) + local_entity, public_id=uuid.uuid4().hex + ) self.assertEqual(public_id1, public_id3) @unit.skip_if_cache_disabled('identity') def test_cache_when_id_mapping_crud(self): local_id = uuid.uuid4().hex - local_entity = {'domain_id': self.domainA['id'], - 'local_id': local_id, - 'entity_type': mapping.EntityType.USER} + local_entity = { + 'domain_id': self.domainA['id'], + 'local_id': local_id, + 'entity_type': mapping.EntityType.USER, + } # Check no mappings for the new local entity self.assertIsNone(PROVIDERS.id_mapping_api.get_public_id(local_entity)) @@ -282,7 +356,8 @@ class SqlIDMapping(test_backend_sql.SqlTests): # Create new mappings, and it should be in the cache after created public_id = PROVIDERS.id_mapping_api.create_id_mapping(local_entity) self.assertEqual( - public_id, PROVIDERS.id_mapping_api.get_public_id(local_entity)) + public_id, PROVIDERS.id_mapping_api.get_public_id(local_entity) + ) local_id_ref = PROVIDERS.id_mapping_api.get_id_mapping(public_id) self.assertEqual(self.domainA['id'], local_id_ref['domain_id']) self.assertEqual(local_id, local_id_ref['local_id']) @@ -302,21 +377,31 @@ class SqlIDMapping(test_backend_sql.SqlTests): local_id5 = uuid.uuid4().hex # Create five mappings,two in domainA, three in domainB 
- local_entity1 = {'domain_id': self.domainA['id'], - 'local_id': local_id1, - 'entity_type': mapping.EntityType.USER} - local_entity2 = {'domain_id': self.domainA['id'], - 'local_id': local_id2, - 'entity_type': mapping.EntityType.USER} - local_entity3 = {'domain_id': self.domainB['id'], - 'local_id': local_id3, - 'entity_type': mapping.EntityType.GROUP} - local_entity4 = {'domain_id': self.domainB['id'], - 'local_id': local_id4, - 'entity_type': mapping.EntityType.USER} - local_entity5 = {'domain_id': self.domainB['id'], - 'local_id': local_id5, - 'entity_type': mapping.EntityType.USER} + local_entity1 = { + 'domain_id': self.domainA['id'], + 'local_id': local_id1, + 'entity_type': mapping.EntityType.USER, + } + local_entity2 = { + 'domain_id': self.domainA['id'], + 'local_id': local_id2, + 'entity_type': mapping.EntityType.USER, + } + local_entity3 = { + 'domain_id': self.domainB['id'], + 'local_id': local_id3, + 'entity_type': mapping.EntityType.GROUP, + } + local_entity4 = { + 'domain_id': self.domainB['id'], + 'local_id': local_id4, + 'entity_type': mapping.EntityType.USER, + } + local_entity5 = { + 'domain_id': self.domainB['id'], + 'local_id': local_id5, + 'entity_type': mapping.EntityType.USER, + } PROVIDERS.id_mapping_api.create_id_mapping(local_entity1) PROVIDERS.id_mapping_api.create_id_mapping(local_entity2) @@ -326,7 +411,8 @@ class SqlIDMapping(test_backend_sql.SqlTests): # Purge mappings for domainA, should be left with those in B PROVIDERS.id_mapping_api.purge_mappings( - {'domain_id': self.domainA['id']}) + {'domain_id': self.domainA['id']} + ) self.assertIsNone( PROVIDERS.id_mapping_api.get_public_id(local_entity1) ) @@ -336,15 +422,20 @@ class SqlIDMapping(test_backend_sql.SqlTests): # Purge mappings for type Group, should purge one more PROVIDERS.id_mapping_api.purge_mappings( - {'entity_type': mapping.EntityType.GROUP}) + {'entity_type': mapping.EntityType.GROUP} + ) self.assertIsNone( PROVIDERS.id_mapping_api.get_public_id(local_entity3) ) # 
Purge mapping for a specific local identifier PROVIDERS.id_mapping_api.purge_mappings( - {'domain_id': self.domainB['id'], 'local_id': local_id4, - 'entity_type': mapping.EntityType.USER}) + { + 'domain_id': self.domainB['id'], + 'local_id': local_id4, + 'entity_type': mapping.EntityType.USER, + } + ) self.assertIsNone( PROVIDERS.id_mapping_api.get_public_id(local_entity4) ) @@ -359,16 +450,26 @@ class SqlIDMapping(test_backend_sql.SqlTests): # Create five mappings: # two users in domainA, one group and two users in domainB local_entities = [ - {'domain_id': self.domainA['id'], - 'entity_type': mapping.EntityType.USER}, - {'domain_id': self.domainA['id'], - 'entity_type': mapping.EntityType.USER}, - {'domain_id': self.domainB['id'], - 'entity_type': mapping.EntityType.GROUP}, - {'domain_id': self.domainB['id'], - 'entity_type': mapping.EntityType.USER}, - {'domain_id': self.domainB['id'], - 'entity_type': mapping.EntityType.USER} + { + 'domain_id': self.domainA['id'], + 'entity_type': mapping.EntityType.USER, + }, + { + 'domain_id': self.domainA['id'], + 'entity_type': mapping.EntityType.USER, + }, + { + 'domain_id': self.domainB['id'], + 'entity_type': mapping.EntityType.GROUP, + }, + { + 'domain_id': self.domainB['id'], + 'entity_type': mapping.EntityType.USER, + }, + { + 'domain_id': self.domainB['id'], + 'entity_type': mapping.EntityType.USER, + }, ] for e in local_entities: e['local_id'] = uuid.uuid4().hex @@ -402,8 +503,9 @@ class SqlIDMapping(test_backend_sql.SqlTests): self.domainB['id'], entity_type=mapping.EntityType.USER ) ) - domain_b_mappings_user = [m.to_dict() - for m in domain_b_mappings_user] + domain_b_mappings_user = [ + m.to_dict() for m in domain_b_mappings_user + ] self.assertCountEqual(local_entities[-2:], domain_b_mappings_user) def test_get_domain_mapping_list_by_group_entity_type(self): diff --git a/keystone/tests/unit/test_backend_ldap.py b/keystone/tests/unit/test_backend_ldap.py index 84ec1a6e5c..5935da4b11 100644 --- 
a/keystone/tests/unit/test_backend_ldap.py +++ b/keystone/tests/unit/test_backend_ldap.py @@ -64,15 +64,19 @@ def _assert_backends(testcase, **kwargs): return entrypoint.resolve() def _load_domain_specific_configs(manager): - if (not manager.domain_configs.configured and - CONF.identity.domain_specific_drivers_enabled): + if ( + not manager.domain_configs.configured + and CONF.identity.domain_specific_drivers_enabled + ): manager.domain_configs.setup_domain_drivers( - manager.driver, manager.resource_api) + manager.driver, manager.resource_api + ) - def _assert_equal(expected_cls, observed_cls, subsystem, - domain=None): - msg = ('subsystem %(subsystem)s expected %(expected_cls)r, ' - 'but observed %(observed_cls)r') + def _assert_equal(expected_cls, observed_cls, subsystem, domain=None): + msg = ( + 'subsystem %(subsystem)s expected %(expected_cls)r, ' + 'but observed %(observed_cls)r' + ) if domain: subsystem = '%s[domain=%s]' % (subsystem, domain) assert expected_cls == observed_cls, msg % { @@ -99,18 +103,22 @@ def _assert_backends(testcase, **kwargs): if domain is None: observed_cls = _get_backend_cls(testcase, subsystem) expected_cls = _get_entrypoint_cls( - subsystem, entrypoint_name) + subsystem, entrypoint_name + ) _assert_equal(expected_cls, observed_cls, subsystem) continue observed_cls = _get_domain_specific_backend_cls( - manager, domain) + manager, domain + ) expected_cls = _get_entrypoint_cls(subsystem, entrypoint_name) _assert_equal(expected_cls, observed_cls, subsystem, domain) else: - raise ValueError('%r is not an expected value for entrypoint name' - % entrypoint_name) + raise ValueError( + '%r is not an expected value for entrypoint name' + % entrypoint_name + ) class IdentityTests(identity_tests.IdentityTests): @@ -153,15 +161,18 @@ class IdentityTests(identity_tests.IdentityTests): def test_arbitrary_attributes_are_returned_from_get_user(self): self.skip_test_overrides( - "Using arbitrary attributes doesn't work under LDAP") + "Using 
arbitrary attributes doesn't work under LDAP" + ) def test_new_arbitrary_attributes_are_returned_from_update_user(self): self.skip_test_overrides( - "Using arbitrary attributes doesn't work under LDAP") + "Using arbitrary attributes doesn't work under LDAP" + ) def test_updated_arbitrary_attributes_are_returned_from_update_user(self): self.skip_test_overrides( - "Using arbitrary attributes doesn't work under LDAP") + "Using arbitrary attributes doesn't work under LDAP" + ) def test_remove_user_from_group(self): self.skip_test_overrides('N/A: LDAP does not support write') @@ -214,7 +225,8 @@ class AssignmentTests(assignment_tests.AssignmentTests): def test_get_roles_for_groups_on_domain(self): self.skip_test_overrides( 'N/A: LDAP does not implement get_roles_for_groups; ' - 'see bug 1333712 for details') + 'see bug 1333712 for details' + ) def test_get_role_by_trustor_and_project(self): self.skip_test_overrides('Domains are read-only against LDAP') @@ -222,7 +234,8 @@ class AssignmentTests(assignment_tests.AssignmentTests): def test_get_roles_for_groups_on_project(self): self.skip_test_overrides( 'N/A: LDAP does not implement get_roles_for_groups; ' - 'see bug 1333712 for details') + 'see bug 1333712 for details' + ) def test_list_domains_for_groups(self): self.skip_test_overrides('N/A: LDAP does not support multiple domains') @@ -230,7 +243,8 @@ class AssignmentTests(assignment_tests.AssignmentTests): def test_list_projects_for_groups(self): self.skip_test_overrides( 'N/A: LDAP does not implement list_projects_for_groups; ' - 'see bug 1333712 for details') + 'see bug 1333712 for details' + ) def test_multi_group_grants_on_project_domain(self): self.skip_test_overrides('N/A: LDAP does not support multiple domains') @@ -351,8 +365,9 @@ class LDAPTestSetup(object): self.assert_backends() -class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, - ResourceTests): +class BaseLDAPIdentity( + LDAPTestSetup, IdentityTests, AssignmentTests, ResourceTests +): 
def _get_domain_fixture(self): """Return the static domain, since domains in LDAP are read-only.""" @@ -374,21 +389,23 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, return config_files def new_user_ref(self, domain_id, project_id=None, **kwargs): - ref = unit.new_user_ref(domain_id=domain_id, project_id=project_id, - **kwargs) + ref = unit.new_user_ref( + domain_id=domain_id, project_id=project_id, **kwargs + ) if 'id' not in kwargs: del ref['id'] return ref def get_user_enabled_vals(self, user): - user_dn = ( - PROVIDERS.identity_api.driver.user._id_to_dn_string(user['id'])) + user_dn = PROVIDERS.identity_api.driver.user._id_to_dn_string( + user['id'] + ) enabled_attr_name = CONF.ldap.user_enabled_attribute ldap_ = PROVIDERS.identity_api.driver.user.get_connection() - res = ldap_.search_s(user_dn, - ldap.SCOPE_BASE, - u'(sn=%s)' % user['name']) + res = ldap_.search_s( + user_dn, ldap.SCOPE_BASE, u'(sn=%s)' % user['name'] + ) if enabled_attr_name in res[0][1]: return res[0][1][enabled_attr_name] else: @@ -408,9 +425,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, user['password'] = u'fäképass2' PROVIDERS.identity_api.update_user(user['id'], user) - self.assertRaises(exception.Forbidden, - PROVIDERS.identity_api.delete_user, - user['id']) + self.assertRaises( + exception.Forbidden, PROVIDERS.identity_api.delete_user, user['id'] + ) def test_user_filter(self): user_ref = PROVIDERS.identity_api.get_user(self.user_foo['id']) @@ -418,15 +435,18 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, self.assertDictEqual(self.user_foo, user_ref) driver = PROVIDERS.identity_api._select_identity_driver( - user_ref['domain_id']) + user_ref['domain_id'] + ) driver.user.ldap_filter = '(CN=DOES_NOT_MATCH)' # invalidate the cache if the result is cached. 
PROVIDERS.identity_api.get_user.invalidate( PROVIDERS.identity_api, self.user_foo['id'] ) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, - self.user_foo['id']) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.get_user, + self.user_foo['id'], + ) def test_list_users_by_name_and_with_filter(self): # confirm that the user is not exposed when it does not match the @@ -435,11 +455,13 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, hints.add_filter('name', self.user_foo['name']) domain_id = self.user_foo['domain_id'] driver = PROVIDERS.identity_api._select_identity_driver(domain_id) - driver.user.ldap_filter = ('(|(cn=%s)(cn=%s))' % - (self.user_sna['id'], self.user_two['id'])) + driver.user.ldap_filter = '(|(cn=%s)(cn=%s))' % ( + self.user_sna['id'], + self.user_two['id'], + ) users = PROVIDERS.identity_api.list_users( - domain_scope=self._set_domain_scope(domain_id), - hints=hints) + domain_scope=self._set_domain_scope(domain_id), hints=hints + ) self.assertEqual(0, len(users)) def test_list_groups_by_name_and_with_filter(self): @@ -453,48 +475,53 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, group_names.append(group['name']) # confirm that the groups can all be listed groups = PROVIDERS.identity_api.list_groups( - domain_scope=self._set_domain_scope(domain['id'])) + domain_scope=self._set_domain_scope(domain['id']) + ) self.assertEqual(numgroups, len(groups)) # configure the group filter driver = PROVIDERS.identity_api._select_identity_driver(domain['id']) - driver.group.ldap_filter = ('(|(ou=%s)(ou=%s))' % - tuple(group_names[:2])) + driver.group.ldap_filter = '(|(ou=%s)(ou=%s))' % tuple(group_names[:2]) # confirm that the group filter is working groups = PROVIDERS.identity_api.list_groups( - domain_scope=self._set_domain_scope(domain['id'])) + domain_scope=self._set_domain_scope(domain['id']) + ) self.assertEqual(2, len(groups)) # confirm that a group is not 
exposed when it does not match the # filter setting in conf even if it is requested by name in group list hints = driver_hints.Hints() hints.add_filter('name', group_names[2]) groups = PROVIDERS.identity_api.list_groups( - domain_scope=self._set_domain_scope(domain['id']), - hints=hints) + domain_scope=self._set_domain_scope(domain['id']), hints=hints + ) self.assertEqual(0, len(groups)) def test_remove_role_grant_from_user_and_project(self): PROVIDERS.assignment_api.create_grant( user_id=self.user_foo['id'], project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.project_baz['id']) + user_id=self.user_foo['id'], project_id=self.project_baz['id'] + ) self.assertDictEqual(self.role_member, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( user_id=self.user_foo['id'], project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.project_baz['id']) + user_id=self.user_foo['id'], project_id=self.project_baz['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - user_id=self.user_foo['id'], - project_id=self.project_baz['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + user_id=self.user_foo['id'], + project_id=self.project_baz['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_get_and_remove_role_grant_by_group_and_project(self): new_domain = self._get_domain_fixture() @@ -507,34 +534,38 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - 
project_id=self.project_bar['id']) + group_id=new_group['id'], project_id=self.project_bar['id'] + ) self.assertEqual([], roles_ref) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( group_id=new_group['id'], project_id=self.project_bar['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.project_bar['id']) + group_id=new_group['id'], project_id=self.project_bar['id'] + ) self.assertNotEmpty(roles_ref) self.assertDictEqual(self.role_member, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( group_id=new_group['id'], project_id=self.project_bar['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.project_bar['id']) + group_id=new_group['id'], project_id=self.project_bar['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - PROVIDERS.assignment_api.delete_grant, - group_id=new_group['id'], - project_id=self.project_bar['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.RoleAssignmentNotFound, + PROVIDERS.assignment_api.delete_grant, + group_id=new_group['id'], + project_id=self.project_bar['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_get_and_remove_role_grant_by_group_and_domain(self): # TODO(henry-nash): We should really rewrite the tests in @@ -542,7 +573,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # domains are sourced from, so that we would not need to override such # tests here. This is raised as bug 1373865. 
new_domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=new_domain['id'],) + new_group = unit.new_group_ref( + domain_id=new_domain['id'], + ) new_group = PROVIDERS.identity_api.create_group(new_group) new_user = self.new_user_ref(domain_id=new_domain['id']) new_user = PROVIDERS.identity_api.create_user(new_user) @@ -551,33 +584,37 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) PROVIDERS.assignment_api.create_grant( group_id=new_group['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertDictEqual(self.role_member, roles_ref[0]) PROVIDERS.assignment_api.delete_grant( group_id=new_group['id'], domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + role_id=default_fixtures.MEMBER_ROLE_ID, + ) roles_ref = PROVIDERS.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) + group_id=new_group['id'], domain_id=new_domain['id'] + ) self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.NotFound, - PROVIDERS.assignment_api.delete_grant, - group_id=new_group['id'], - domain_id=new_domain['id'], - role_id=default_fixtures.MEMBER_ROLE_ID) + self.assertRaises( + exception.NotFound, + PROVIDERS.assignment_api.delete_grant, + group_id=new_group['id'], + domain_id=new_domain['id'], + role_id=default_fixtures.MEMBER_ROLE_ID, + ) def test_list_projects_for_user(self): domain = self._get_domain_fixture() @@ -590,13 +627,15 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # new grant(user1, role_member, 
project_bar) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) # new grant(user1, role_member, project_baz) PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], project_id=self.project_baz['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_baz['id'], + role_id=self.role_member['id'], ) user_projects = PROVIDERS.assignment_api.list_projects_for_user( user1['id'] @@ -614,13 +653,15 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # new grant(group1(user2), role_member, project_bar) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + group_id=group1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) # new grant(group1(user2), role_member, project_baz) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=self.project_baz['id'], - role_id=self.role_member['id'] + group_id=group1['id'], + project_id=self.project_baz['id'], + role_id=self.role_member['id'], ) user_projects = PROVIDERS.assignment_api.list_projects_for_user( user2['id'] @@ -629,8 +670,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # new grant(group1(user2), role_other, project_bar) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=self.project_bar['id'], - role_id=self.role_other['id'] + group_id=group1['id'], + project_id=self.project_bar['id'], + role_id=self.role_other['id'], ) user_projects = PROVIDERS.assignment_api.list_projects_for_user( user2['id'] @@ -652,12 +694,14 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # Now, add grant to user1 and group1 in project_bar PROVIDERS.assignment_api.create_grant( - user_id=user1['id'], 
project_id=self.project_bar['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + group_id=group1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) # The result is user1 has only one project granted @@ -668,8 +712,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # Now, delete user1 grant into project_bar and check PROVIDERS.assignment_api.delete_grant( - user_id=user1['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + user_id=user1['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) # The result is user1 has only one project granted. @@ -694,28 +739,28 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, project2 = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project2['id'], project2) - PROVIDERS.identity_api.add_user_to_group( - new_user['id'], group1['id'] - ) - PROVIDERS.identity_api.add_user_to_group( - new_user['id'], group2['id'] - ) + PROVIDERS.identity_api.add_user_to_group(new_user['id'], group1['id']) + PROVIDERS.identity_api.add_user_to_group(new_user['id'], group2['id']) PROVIDERS.assignment_api.create_grant( - user_id=new_user['id'], project_id=self.project_bar['id'], - role_id=self.role_member['id'] + user_id=new_user['id'], + project_id=self.project_bar['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=new_user['id'], project_id=project1['id'], - role_id=self.role_admin['id'] + user_id=new_user['id'], + project_id=project1['id'], + role_id=self.role_admin['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group2['id'], project_id=project2['id'], - role_id=self.role_admin['id'] + group_id=group2['id'], + 
project_id=project2['id'], + role_id=self.role_admin['id'], ) user_projects = PROVIDERS.assignment_api.list_projects_for_user( - new_user['id']) + new_user['id'] + ) self.assertEqual(3, len(user_projects)) def test_list_role_assignments_unfiltered(self): @@ -735,11 +780,13 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, PROVIDERS.assignment_api.create_grant( user_id=new_user['id'], project_id=new_project['id'], - role_id=default_fixtures.OTHER_ROLE_ID) + role_id=default_fixtures.OTHER_ROLE_ID, + ) PROVIDERS.assignment_api.create_grant( group_id=new_group['id'], project_id=new_project['id'], - role_id=default_fixtures.ADMIN_ROLE_ID) + role_id=default_fixtures.ADMIN_ROLE_ID, + ) # Read back the list of assignments - check it is gone up by 2 after_assignments = len( @@ -781,22 +828,26 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, user['id'], self.project_baz['id'], role_member['id'] ) driver = PROVIDERS.identity_api._select_identity_driver( - user['domain_id']) + user['domain_id'] + ) driver.user.LDAP_USER = None driver.user.LDAP_PASSWORD = None with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=user['id'], - password=None) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=user['id'], + password=None, + ) @mock.patch.object(versionutils, 'report_deprecated_feature') def test_user_crud(self, mock_deprecator): # NOTE(stevemar): As of the Mitaka release, we now check for calls that # the LDAP write functionality has been deprecated. 
user_dict = self.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) user = PROVIDERS.identity_api.create_user(user_dict) args, _kwargs = mock_deprecator.call_args self.assertIn("create_user for the LDAP identity backend", args[1]) @@ -862,7 +913,7 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, group_ref = PROVIDERS.identity_api.update_group(group['id'], group) self.assertLessEqual( PROVIDERS.identity_api.get_group(group['id']).items(), - group_ref.items() + group_ref.items(), ) @unit.skip_if_cache_disabled('identity') @@ -881,13 +932,13 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, user_updated = PROVIDERS.identity_api.update_user(ref['id'], user) self.assertLessEqual( PROVIDERS.identity_api.get_user(ref['id']).items(), - user_updated.items() + user_updated.items(), ) self.assertLessEqual( PROVIDERS.identity_api.get_user_by_name( ref['name'], ref['domain_id'] ).items(), - user_updated.items() + user_updated.items(), ) @unit.skip_if_cache_disabled('identity') @@ -903,24 +954,31 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, user_updated = PROVIDERS.identity_api.update_user(ref['id'], user) self.assertLessEqual( PROVIDERS.identity_api.get_user(ref['id']).items(), - user_updated.items() + user_updated.items(), ) self.assertLessEqual( PROVIDERS.identity_api.get_user_by_name( ref['name'], ref['domain_id'] ).items(), - user_updated.items() + user_updated.items(), ) def test_create_user_none_mapping(self): # When create a user where an attribute maps to None, the entry is # created without that attribute and it doesn't fail with a TypeError. 
driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.attribute_ignore = ['enabled', 'email', - 'projects', 'projectId'] - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id, - project_id='maps_to_none') + CONF.identity.default_domain_id + ) + driver.user.attribute_ignore = [ + 'enabled', + 'email', + 'projects', + 'projectId', + ] + user = self.new_user_ref( + domain_id=CONF.identity.default_domain_id, + project_id='maps_to_none', + ) # If this doesn't raise, then the test is successful. user = PROVIDERS.identity_api.create_user(user) @@ -930,9 +988,14 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # ignored in configuration is implicitly ignored without triggering # an error. driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.attribute_ignore = ['enabled', 'email', - 'projects', 'projectId'] + CONF.identity.default_domain_id + ) + driver.user.attribute_ignore = [ + 'enabled', + 'email', + 'projects', + 'projectId', + ] user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -943,8 +1006,10 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, def test_update_user_name(self): """A user's name cannot be changed through the LDAP driver.""" - self.assertRaises(exception.Conflict, - super(BaseLDAPIdentity, self).test_update_user_name) + self.assertRaises( + exception.Conflict, + super(BaseLDAPIdentity, self).test_update_user_name, + ) def test_user_id_comma(self): """Even if the user has a , in their ID, groups can be listed.""" @@ -954,8 +1019,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # Since we want to fake up this special ID, we'll squirt this # direct into the driver and bypass the manager layer. 
user_id = u'Doe, John' - user = self.new_user_ref(id=user_id, - domain_id=CONF.identity.default_domain_id) + user = self.new_user_ref( + id=user_id, domain_id=CONF.identity.default_domain_id + ) user = PROVIDERS.identity_api.driver.create_user(user_id, user) # Now we'll use the manager to discover it, which will create a @@ -1001,8 +1067,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # Since we want to fake up this special ID, we'll squirt this # direct into the driver and bypass the manager layer user_id = u'Doe, John' - user = self.new_user_ref(id=user_id, - domain_id=CONF.identity.default_domain_id) + user = self.new_user_ref( + id=user_id, domain_id=CONF.identity.default_domain_id + ) PROVIDERS.identity_api.driver.create_user(user_id, user) # Now we'll use the manager to discover it, which will create a @@ -1033,8 +1100,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # When the server is configured so that the enabled attribute is # ignored for users, users cannot be disabled. - self.config_fixture.config(group='ldap', - user_attribute_ignore=['enabled']) + self.config_fixture.config( + group='ldap', user_attribute_ignore=['enabled'] + ) # Need to re-load backends for the config change to take effect. self.load_backends() @@ -1042,8 +1110,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # Attempt to disable the user. self.assertRaises( exception.ForbiddenAction, - PROVIDERS.identity_api.update_user, self.user_foo['id'], - {'enabled': False} + PROVIDERS.identity_api.update_user, + self.user_foo['id'], + {'enabled': False}, ) user_info = PROVIDERS.identity_api.get_user(self.user_foo['id']) @@ -1056,8 +1125,9 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, # When the server is configured so that the enabled attribute is # ignored for groups, groups cannot be disabled. 
- self.config_fixture.config(group='ldap', - group_attribute_ignore=['enabled']) + self.config_fixture.config( + group='ldap', group_attribute_ignore=['enabled'] + ) # Need to re-load backends for the config change to take effect. self.load_backends() @@ -1068,9 +1138,12 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, new_group = PROVIDERS.identity_api.create_group(new_group) # Attempt to disable the group. - self.assertRaises(exception.ForbiddenAction, - PROVIDERS.identity_api.update_group, new_group['id'], - {'enabled': False}) + self.assertRaises( + exception.ForbiddenAction, + PROVIDERS.identity_api.update_group, + new_group['id'], + {'enabled': False}, + ) group_info = PROVIDERS.identity_api.get_group(new_group['id']) @@ -1081,70 +1154,92 @@ class BaseLDAPIdentity(LDAPTestSetup, IdentityTests, AssignmentTests, def test_list_role_assignment_by_domain(self): """Multiple domain assignments are not supported.""" self.assertRaises( - (exception.Forbidden, exception.DomainNotFound, - exception.ValidationError), - super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain) + ( + exception.Forbidden, + exception.DomainNotFound, + exception.ValidationError, + ), + super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain, + ) def test_list_role_assignment_by_user_with_domain_group_roles(self): """Multiple domain assignments are not supported.""" self.assertRaises( - (exception.Forbidden, exception.DomainNotFound, - exception.ValidationError), - super(BaseLDAPIdentity, self). 
- test_list_role_assignment_by_user_with_domain_group_roles) + ( + exception.Forbidden, + exception.DomainNotFound, + exception.ValidationError, + ), + super( + BaseLDAPIdentity, self + ).test_list_role_assignment_by_user_with_domain_group_roles, + ) def test_list_role_assignment_using_sourced_groups_with_domains(self): """Multiple domain assignments are not supported.""" self.assertRaises( - (exception.Forbidden, exception.ValidationError, - exception.DomainNotFound), - super(BaseLDAPIdentity, self). - test_list_role_assignment_using_sourced_groups_with_domains) + ( + exception.Forbidden, + exception.ValidationError, + exception.DomainNotFound, + ), + super( + BaseLDAPIdentity, self + ).test_list_role_assignment_using_sourced_groups_with_domains, + ) def test_create_project_with_domain_id_and_without_parent_id(self): """Multiple domains are not supported.""" self.assertRaises( exception.ValidationError, - super(BaseLDAPIdentity, self). - test_create_project_with_domain_id_and_without_parent_id) + super( + BaseLDAPIdentity, self + ).test_create_project_with_domain_id_and_without_parent_id, + ) def test_create_project_with_domain_id_mismatch_to_parent_domain(self): """Multiple domains are not supported.""" self.assertRaises( exception.ValidationError, - super(BaseLDAPIdentity, self). 
- test_create_project_with_domain_id_mismatch_to_parent_domain) + super( + BaseLDAPIdentity, self + ).test_create_project_with_domain_id_mismatch_to_parent_domain, + ) def test_remove_foreign_assignments_when_deleting_a_domain(self): """Multiple domains are not supported.""" self.assertRaises( (exception.ValidationError, exception.DomainNotFound), - super(BaseLDAPIdentity, - self).test_remove_foreign_assignments_when_deleting_a_domain) + super( + BaseLDAPIdentity, self + ).test_remove_foreign_assignments_when_deleting_a_domain, + ) class LDAPIdentity(BaseLDAPIdentity): def assert_backends(self): - _assert_backends(self, - assignment='sql', - identity='ldap', - resource='sql') + _assert_backends( + self, assignment='sql', identity='ldap', resource='sql' + ) def test_list_domains(self): domains = PROVIDERS.resource_api.list_domains() default_domain = unit.new_domain_ref( description=u'The default domain', id=CONF.identity.default_domain_id, - name=u'Default') + name=u'Default', + ) self.assertEqual([default_domain], domains) def test_authenticate_wrong_credentials(self): - self.assertRaises(exception.LDAPInvalidCredentialsError, - PROVIDERS.identity_api.driver.user.get_connection, - user='demo', - password='demo', - end_user_auth=True) + self.assertRaises( + exception.LDAPInvalidCredentialsError, + PROVIDERS.identity_api.driver.user.get_connection, + user='demo', + password='demo', + end_user_auth=True, + ) def test_configurable_allowed_project_actions(self): domain = self._get_domain_fixture() @@ -1157,13 +1252,16 @@ class LDAPIdentity(BaseLDAPIdentity): PROVIDERS.resource_api.update_project(project['id'], project) PROVIDERS.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project['id'], + ) def test_user_enable_attribute_mask(self): - self.config_fixture.config(group='ldap', 
user_enabled_mask=2, - user_enabled_default='512') + self.config_fixture.config( + group='ldap', user_enabled_mask=2, user_enabled_default='512' + ) self.ldapdb.clear() self.load_backends() @@ -1208,15 +1306,19 @@ class LDAPIdentity(BaseLDAPIdentity): self.assertNotIn('enabled_nomask', user_ref) def test_user_enabled_invert(self): - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_default='False') + self.config_fixture.config( + group='ldap', + user_enabled_invert=True, + user_enabled_default='False', + ) self.ldapdb.clear() self.load_backends() user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user2 = self.new_user_ref(enabled=False, - domain_id=CONF.identity.default_domain_id) + user2 = self.new_user_ref( + enabled=False, domain_id=CONF.identity.default_domain_id + ) user3 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -1263,8 +1365,11 @@ class LDAPIdentity(BaseLDAPIdentity): @mock.patch.object(common_ldap.BaseLdap, '_ldap_get') def test_user_enabled_invert_default_str_value(self, mock_ldap_get): - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_default='False') + self.config_fixture.config( + group='ldap', + user_enabled_invert=True, + user_enabled_default='False', + ) # Mock the search results to return an entry with # no enabled value. 
mock_ldap_get.return_value = ( @@ -1272,8 +1377,8 @@ class LDAPIdentity(BaseLDAPIdentity): { 'sn': [uuid.uuid4().hex], 'email': [uuid.uuid4().hex], - 'cn': ['junk'] - } + 'cn': ['junk'], + }, ) user_api = identity.backends.ldap.UserApi(CONF) @@ -1285,43 +1390,46 @@ class LDAPIdentity(BaseLDAPIdentity): @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'connect') @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'search_s') @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'simple_bind_s') - def test_filter_ldap_result_by_attr(self, mock_simple_bind_s, - mock_search_s, mock_connect): + def test_filter_ldap_result_by_attr( + self, mock_simple_bind_s, mock_search_s, mock_connect + ): # Mock the ldap search results to return user entries with # user_name_attribute('sn') value has emptyspaces, emptystring # and attibute itself is not set. - mock_search_s.return_value = [( - 'sn=junk1,dc=example,dc=com', - { - 'cn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - 'sn': ['junk1'] - } - ), + mock_search_s.return_value = [ ( - '', - { - 'cn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - } - ), + 'sn=junk1,dc=example,dc=com', + { + 'cn': [uuid.uuid4().hex], + 'email': [uuid.uuid4().hex], + 'sn': ['junk1'], + }, + ), ( - 'sn=,dc=example,dc=com', - { - 'cn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - 'sn': [''] - } - ), + '', + { + 'cn': [uuid.uuid4().hex], + 'email': [uuid.uuid4().hex], + }, + ), ( - 'sn= ,dc=example,dc=com', - { - 'cn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - 'sn': [' '] - } - )] + 'sn=,dc=example,dc=com', + { + 'cn': [uuid.uuid4().hex], + 'email': [uuid.uuid4().hex], + 'sn': [''], + }, + ), + ( + 'sn= ,dc=example,dc=com', + { + 'cn': [uuid.uuid4().hex], + 'email': [uuid.uuid4().hex], + 'sn': [' '], + }, + ), + ] user_api = identity.backends.ldap.UserApi(CONF) user_refs = user_api.get_all() @@ -1337,38 +1445,38 @@ class LDAPIdentity(BaseLDAPIdentity): @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'connect') 
@mock.patch.object(common_ldap.KeystoneLDAPHandler, 'search_s') @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'simple_bind_s') - def test_filter_ldap_result_with_case_sensitive_attr(self, - mock_simple_bind_s, - mock_search_s, - mock_connect): + def test_filter_ldap_result_with_case_sensitive_attr( + self, mock_simple_bind_s, mock_search_s, mock_connect + ): # Mock the ldap search results to return user entries # irrespective of lowercase and uppercase characters in # ldap_result attribute keys e.g. {'Sn': ['junk1']} with # user_name_attribute('sn') - mock_search_s.return_value = [( - 'sn=junk1,dc=example,dc=com', - { - 'cn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - 'sN': ['junk1'] - } - ), + mock_search_s.return_value = [ ( - 'sn=junk1,dc=example,dc=com', - { - 'cn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - 'Sn': ['junk1'] - } - ), + 'sn=junk1,dc=example,dc=com', + { + 'cn': [uuid.uuid4().hex], + 'email': [uuid.uuid4().hex], + 'sN': ['junk1'], + }, + ), ( - 'sn=junk1,dc=example,dc=com', - { - 'cn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - 'sn': [' '] - } - ) + 'sn=junk1,dc=example,dc=com', + { + 'cn': [uuid.uuid4().hex], + 'email': [uuid.uuid4().hex], + 'Sn': ['junk1'], + }, + ), + ( + 'sn=junk1,dc=example,dc=com', + { + 'cn': [uuid.uuid4().hex], + 'email': [uuid.uuid4().hex], + 'sn': [' '], + }, + ), ] user_api = identity.backends.ldap.UserApi(CONF) @@ -1385,16 +1493,19 @@ class LDAPIdentity(BaseLDAPIdentity): def test_user_enabled_attribute_handles_expired(self, mock_ldap_get): # If using 'passwordisexpired' as enabled attribute, and inverting it, # Then an unauthorized user (expired password) should not be enabled. 
- self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_attribute='passwordisexpired') + self.config_fixture.config( + group='ldap', + user_enabled_invert=True, + user_enabled_attribute='passwordisexpired', + ) mock_ldap_get.return_value = ( u'uid=123456789,c=us,ou=our_ldap,o=acme.com', { 'uid': [123456789], 'mail': ['shaun@acme.com'], 'passwordisexpired': ['TRUE'], - 'cn': ['uid=123456789,c=us,ou=our_ldap,o=acme.com'] - } + 'cn': ['uid=123456789,c=us,ou=our_ldap,o=acme.com'], + }, ) user_api = identity.backends.ldap.UserApi(CONF) @@ -1406,16 +1517,19 @@ class LDAPIdentity(BaseLDAPIdentity): # If using 'passwordisexpired' as enabled attribute, and inverting it, # and the result is utf8 encoded, then the an authorized user should # be enabled. - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_attribute='passwordisexpired') + self.config_fixture.config( + group='ldap', + user_enabled_invert=True, + user_enabled_attribute='passwordisexpired', + ) mock_ldap_get.return_value = ( u'uid=123456789,c=us,ou=our_ldap,o=acme.com', { 'uid': [123456789], 'mail': [u'shaun@acme.com'], 'passwordisexpired': [u'false'], - 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com'] - } + 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com'], + }, ) user_api = identity.backends.ldap.UserApi(CONF) @@ -1434,9 +1548,8 @@ class LDAPIdentity(BaseLDAPIdentity): @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'connect') def test_chase_referrals_off(self, mocked_fakeldap): self.config_fixture.config( - group='ldap', - url='fake://memory', - chase_referrals=False) + group='ldap', url='fake://memory', chase_referrals=False + ) user_api = identity.backends.ldap.UserApi(CONF) user_api.get_connection(user=None, password=None) @@ -1448,9 +1561,8 @@ class LDAPIdentity(BaseLDAPIdentity): @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'connect') def test_chase_referrals_on(self, mocked_fakeldap): self.config_fixture.config( - group='ldap', 
- url='fake://memory', - chase_referrals=True) + group='ldap', url='fake://memory', chase_referrals=True + ) user_api = identity.backends.ldap.UserApi(CONF) user_api.get_connection(user=None, password=None) @@ -1463,9 +1575,8 @@ class LDAPIdentity(BaseLDAPIdentity): def test_debug_level_set(self, mocked_fakeldap): level = 12345 self.config_fixture.config( - group='ldap', - url='fake://memory', - debug_level=level) + group='ldap', url='fake://memory', debug_level=level + ) user_api = identity.backends.ldap.UserApi(CONF) user_api.get_connection(user=None, password=None) @@ -1477,23 +1588,28 @@ class LDAPIdentity(BaseLDAPIdentity): def test_user_extra_attribute_mapping(self): self.config_fixture.config( group='ldap', - user_additional_attribute_mapping=['description:name']) + user_additional_attribute_mapping=['description:name'], + ) self.load_backends() - user = self.new_user_ref(name='EXTRA_ATTRIBUTES', - password='extra', - domain_id=CONF.identity.default_domain_id) + user = self.new_user_ref( + name='EXTRA_ATTRIBUTES', + password='extra', + domain_id=CONF.identity.default_domain_id, + ) user = PROVIDERS.identity_api.create_user(user) dn, attrs = PROVIDERS.identity_api.driver.user._ldap_get(user['id']) self.assertThat([user['name']], matchers.Equals(attrs['description'])) def test_user_description_attribute_mapping(self): self.config_fixture.config( - group='ldap', - user_description_attribute='displayName') + group='ldap', user_description_attribute='displayName' + ) self.load_backends() - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id, - displayName=uuid.uuid4().hex) + user = self.new_user_ref( + domain_id=CONF.identity.default_domain_id, + displayName=uuid.uuid4().hex, + ) description = user['displayName'] user = PROVIDERS.identity_api.create_user(user) res = PROVIDERS.identity_api.driver.user.get_all() @@ -1507,11 +1623,14 @@ class LDAPIdentity(BaseLDAPIdentity): self.config_fixture.config( group='ldap', - 
user_additional_attribute_mapping=['description:description']) + user_additional_attribute_mapping=['description:description'], + ) self.load_backends() - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id, - description=uuid.uuid4().hex) + user = self.new_user_ref( + domain_id=CONF.identity.default_domain_id, + description=uuid.uuid4().hex, + ) description = user['description'] user = PROVIDERS.identity_api.create_user(user) res = PROVIDERS.identity_api.driver.user.get_all() @@ -1527,12 +1646,16 @@ class LDAPIdentity(BaseLDAPIdentity): # the live tests. ldap_id_field = 'sn' ldap_id_value = uuid.uuid4().hex - dn = '%s=%s,ou=Users,cn=example,cn=com' % (ldap_id_field, - ldap_id_value) - modlist = [('objectClass', ['person', 'inetOrgPerson']), - (ldap_id_field, [ldap_id_value]), - ('mail', ['email@example.com']), - ('userPassword', [uuid.uuid4().hex])] + dn = '%s=%s,ou=Users,cn=example,cn=com' % ( + ldap_id_field, + ldap_id_value, + ) + modlist = [ + ('objectClass', ['person', 'inetOrgPerson']), + (ldap_id_field, [ldap_id_value]), + ('mail', ['email@example.com']), + ('userPassword', [uuid.uuid4().hex]), + ] ldap_.add_s(dn, modlist) # make sure the user doesn't break other users @@ -1548,60 +1671,76 @@ class LDAPIdentity(BaseLDAPIdentity): { 'sN': [uuid.uuid4().hex], 'MaIl': [uuid.uuid4().hex], - 'cn': ['junk'] - } + 'cn': ['junk'], + }, ) user = PROVIDERS.identity_api.get_user('junk') - self.assertEqual(mock_ldap_get.return_value[1]['sN'][0], - user['name']) - self.assertEqual(mock_ldap_get.return_value[1]['MaIl'][0], - user['email']) + self.assertEqual(mock_ldap_get.return_value[1]['sN'][0], user['name']) + self.assertEqual( + mock_ldap_get.return_value[1]['MaIl'][0], user['email'] + ) def test_parse_extra_attribute_mapping(self): - option_list = ['description:name', 'gecos:password', - 'fake:invalid', 'invalid1', 'invalid2:', - 'description:name:something'] + option_list = [ + 'description:name', + 'gecos:password', + 'fake:invalid', + 'invalid1', + 
'invalid2:', + 'description:name:something', + ] mapping = PROVIDERS.identity_api.driver.user._parse_extra_attrs( option_list ) - expected_dict = {'description': 'name', 'gecos': 'password', - 'fake': 'invalid', 'invalid2': ''} + expected_dict = { + 'description': 'name', + 'gecos': 'password', + 'fake': 'invalid', + 'invalid2': '', + } self.assertDictEqual(expected_dict, mapping) def test_create_domain(self): domain = unit.new_domain_ref() - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.create_domain, - domain['id'], - domain) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.create_domain, + domain['id'], + domain, + ) @unit.skip_if_no_multiple_domains_support def test_create_domain_case_sensitivity(self): # domains are read-only, so case sensitivity isn't an issue ref = unit.new_domain_ref() - self.assertRaises(exception.Forbidden, - PROVIDERS.resource_api.create_domain, - ref['id'], - ref) + self.assertRaises( + exception.Forbidden, + PROVIDERS.resource_api.create_domain, + ref['id'], + ref, + ) def test_domain_rename_invalidates_get_domain_by_name_cache(self): parent = super(LDAPIdentity, self) self.assertRaises( exception.Forbidden, - parent.test_domain_rename_invalidates_get_domain_by_name_cache) + parent.test_domain_rename_invalidates_get_domain_by_name_cache, + ) def test_project_rename_invalidates_get_project_by_name_cache(self): parent = super(LDAPIdentity, self) self.assertRaises( exception.Forbidden, - parent.test_project_rename_invalidates_get_project_by_name_cache) + parent.test_project_rename_invalidates_get_project_by_name_cache, + ) def test_project_crud(self): # NOTE(topol): LDAP implementation does not currently support the # updating of a project name so this method override # provides a different update test project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project = PROVIDERS.resource_api.create_project(project['id'], 
project) project_ref = PROVIDERS.resource_api.get_project(project['id']) @@ -1614,9 +1753,11 @@ class LDAPIdentity(BaseLDAPIdentity): self.assertDictEqual(project, project_ref) PROVIDERS.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project['id'], + ) @unit.skip_if_cache_disabled('assignment') def test_cache_layer_project_crud(self): @@ -1624,7 +1765,8 @@ class LDAPIdentity(BaseLDAPIdentity): # updating project names. This method override provides a different # update test. project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project_id = project['id'] # Create a project project = PROVIDERS.resource_api.create_project(project_id, project) @@ -1638,7 +1780,8 @@ class LDAPIdentity(BaseLDAPIdentity): # Verify get_project still returns the original project_ref self.assertLessEqual( project.items(), - PROVIDERS.resource_api.get_project(project_id).items()) + PROVIDERS.resource_api.get_project(project_id).items(), + ) # Invalidate cache PROVIDERS.resource_api.get_project.invalidate( PROVIDERS.resource_api, project_id @@ -1646,36 +1789,43 @@ class LDAPIdentity(BaseLDAPIdentity): # Verify get_project now returns the new project self.assertLessEqual( updated_project.items(), - PROVIDERS.resource_api.get_project(project_id).items()) + PROVIDERS.resource_api.get_project(project_id).items(), + ) # Update project using the resource_api manager back to original PROVIDERS.resource_api.update_project(project['id'], project) # Verify get_project returns the original project_ref self.assertLessEqual( project.items(), - PROVIDERS.resource_api.get_project(project_id).items()) + PROVIDERS.resource_api.get_project(project_id).items(), + ) # Delete project bypassing resource_api 
PROVIDERS.resource_api.driver.delete_project(project_id) # Verify get_project still returns the project_ref self.assertLessEqual( project.items(), - PROVIDERS.resource_api.get_project(project_id).items()) + PROVIDERS.resource_api.get_project(project_id).items(), + ) # Invalidate cache PROVIDERS.resource_api.get_project.invalidate( PROVIDERS.resource_api, project_id ) # Verify ProjectNotFound now raised - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project_id) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project_id, + ) # recreate project PROVIDERS.resource_api.create_project(project_id, project) PROVIDERS.resource_api.get_project(project_id) # delete project PROVIDERS.resource_api.delete_project(project_id) # Verify ProjectNotFound is raised - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project_id) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project_id, + ) def test_update_is_domain_field(self): domain = self._get_domain_fixture() @@ -1684,9 +1834,12 @@ class LDAPIdentity(BaseLDAPIdentity): # Try to update the is_domain field to True project['is_domain'] = True - self.assertRaises(exception.ValidationError, - PROVIDERS.resource_api.update_project, - project['id'], project) + self.assertRaises( + exception.ValidationError, + PROVIDERS.resource_api.update_project, + project['id'], + project, + ) def test_multi_role_grant_by_user_group_on_project_domain(self): # This is a partial implementation of the standard test that @@ -1703,17 +1856,20 @@ class LDAPIdentity(BaseLDAPIdentity): user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) user1 = PROVIDERS.identity_api.create_user(user1) project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project1['id'], project1) 
PROVIDERS.assignment_api.add_role_to_user_and_project( user_id=user1['id'], project_id=project1['id'], - role_id=role_list[0]['id']) + role_id=role_list[0]['id'], + ) PROVIDERS.assignment_api.add_role_to_user_and_project( user_id=user1['id'], project_id=project1['id'], - role_id=role_list[1]['id']) + role_id=role_list[1]['id'], + ) # Although list_grants are not yet supported, we can test the # alternate way of getting back lists of grants, where user @@ -1748,37 +1904,37 @@ class LDAPIdentity(BaseLDAPIdentity): def test_base_ldap_connection_deref_option(self): def get_conn(deref_name): - self.config_fixture.config(group='ldap', - alias_dereferencing=deref_name) + self.config_fixture.config( + group='ldap', alias_dereferencing=deref_name + ) base_ldap = common_ldap.BaseLdap(CONF) return base_ldap.get_connection() conn = get_conn('default') - self.assertEqual(ldap.get_option(ldap.OPT_DEREF), - conn.get_option(ldap.OPT_DEREF)) + self.assertEqual( + ldap.get_option(ldap.OPT_DEREF), conn.get_option(ldap.OPT_DEREF) + ) conn = get_conn('always') - self.assertEqual(ldap.DEREF_ALWAYS, - conn.get_option(ldap.OPT_DEREF)) + self.assertEqual(ldap.DEREF_ALWAYS, conn.get_option(ldap.OPT_DEREF)) conn = get_conn('finding') - self.assertEqual(ldap.DEREF_FINDING, - conn.get_option(ldap.OPT_DEREF)) + self.assertEqual(ldap.DEREF_FINDING, conn.get_option(ldap.OPT_DEREF)) conn = get_conn('never') - self.assertEqual(ldap.DEREF_NEVER, - conn.get_option(ldap.OPT_DEREF)) + self.assertEqual(ldap.DEREF_NEVER, conn.get_option(ldap.OPT_DEREF)) conn = get_conn('searching') - self.assertEqual(ldap.DEREF_SEARCHING, - conn.get_option(ldap.OPT_DEREF)) + self.assertEqual(ldap.DEREF_SEARCHING, conn.get_option(ldap.OPT_DEREF)) def test_list_users_no_dn(self): users = PROVIDERS.identity_api.list_users() self.assertEqual(len(default_fixtures.USERS), len(users)) user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 'user_%s' % user['name'])['id'] - for user in 
default_fixtures.USERS) + expected_user_ids = set( + getattr(self, 'user_%s' % user['name'])['id'] + for user in default_fixtures.USERS + ) for user_ref in users: self.assertNotIn('dn', user_ref) self.assertEqual(expected_user_ids, user_ids) @@ -1824,7 +1980,8 @@ class LDAPIdentity(BaseLDAPIdentity): def test_user_id_attribute_in_create(self): driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.user.id_attr = 'mail' user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -1836,7 +1993,8 @@ class LDAPIdentity(BaseLDAPIdentity): def test_user_id_attribute_map(self): driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.user.id_attr = 'mail' user_ref = PROVIDERS.identity_api.get_user(self.user_foo['email']) @@ -1845,10 +2003,10 @@ class LDAPIdentity(BaseLDAPIdentity): self.assertEqual(self.user_foo['email'], user_ref['id']) @mock.patch.object(common_ldap.BaseLdap, '_ldap_get') - def test_get_multivalued_attribute_id_from_dn(self, - mock_ldap_get): + def test_get_multivalued_attribute_id_from_dn(self, mock_ldap_get): driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.user.id_attr = 'mail' # make 'email' multivalued so we can test the error condition @@ -1861,19 +2019,22 @@ class LDAPIdentity(BaseLDAPIdentity): 'cn=users,dc=example,dc=com', { 'mail': [email1, email2], - } + }, ) # This is not a valid scenario, since we do not support multiple value # attribute id on DN. 
- self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.get_user, email1) + self.assertRaises( + exception.NotFound, PROVIDERS.identity_api.get_user, email1 + ) @mock.patch.object(common_ldap.BaseLdap, '_ldap_get') - def test_raise_not_found_dn_for_multivalued_attribute_id(self, - mock_ldap_get): + def test_raise_not_found_dn_for_multivalued_attribute_id( + self, mock_ldap_get + ): driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.user.id_attr = 'mail' # make 'email' multivalued so we can test the error condition @@ -1884,20 +2045,21 @@ class LDAPIdentity(BaseLDAPIdentity): { 'sn': [uuid.uuid4().hex], 'mail': [email1, email2], - 'cn': 'nobodycares' - } + 'cn': 'nobodycares', + }, ) # This is not a valid scenario, since we do not support multiple value # attribute id on DN. - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.get_user, email1) + self.assertRaises( + exception.NotFound, PROVIDERS.identity_api.get_user, email1 + ) @mock.patch.object(common_ldap.BaseLdap, '_ldap_get') - def test_get_id_not_in_dn(self, - mock_ldap_get): + def test_get_id_not_in_dn(self, mock_ldap_get): driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.user.id_attr = 'sAMAccountName' user_id = uuid.uuid4().hex @@ -1907,7 +2069,7 @@ class LDAPIdentity(BaseLDAPIdentity): 'cn': 'someuser', 'sn': [uuid.uuid4().hex], 'sAMAccountName': [user_id], - } + }, ) user_ref = PROVIDERS.identity_api.get_user(user_id) self.assertEqual(user_id, user_ref['id']) @@ -1918,18 +2080,17 @@ class LDAPIdentity(BaseLDAPIdentity): 'cn=nobodycares,dc=example,dc=com', { 'sn': [uuid.uuid4().hex], - } + }, ) user_api = identity.backends.ldap.UserApi(CONF) - self.assertRaises(exception.NotFound, - user_api.get, - 'nobodycares') + self.assertRaises(exception.NotFound, user_api.get, 'nobodycares') 
@mock.patch.object(common_ldap.BaseLdap, '_ldap_get') def test_user_id_not_in_dn(self, mock_ldap_get): driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.user.id_attr = 'uid' driver.user.attribute_mapping['name'] = 'cn' @@ -1939,8 +2100,8 @@ class LDAPIdentity(BaseLDAPIdentity): 'sn': [uuid.uuid4().hex], 'foo': ['bar'], 'cn': ['junk'], - 'uid': ['crap'] - } + 'uid': ['crap'], + }, ) user_ref = PROVIDERS.identity_api.get_user('crap') self.assertEqual('crap', user_ref['id']) @@ -1949,7 +2110,8 @@ class LDAPIdentity(BaseLDAPIdentity): @mock.patch.object(common_ldap.BaseLdap, '_ldap_get') def test_user_name_in_dn(self, mock_ldap_get): driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.user.id_attr = 'SAMAccountName' driver.user.attribute_mapping['name'] = 'cn' @@ -1958,8 +2120,8 @@ class LDAPIdentity(BaseLDAPIdentity): { 'sn': [uuid.uuid4().hex], 'cn': ['Foo Bar'], - 'SAMAccountName': ['crap'] - } + 'SAMAccountName': ['crap'], + }, ) user_ref = PROVIDERS.identity_api.get_user('crap') self.assertEqual('crap', user_ref['id']) @@ -1994,16 +2156,16 @@ class LDAPLimitTests(unit.TestCase, identity_tests.LimitTests): self.load_backends() self.load_fixtures(default_fixtures) identity_tests.LimitTests.setUp(self) - _assert_backends(self, - assignment='sql', - identity='ldap', - resource='sql') + _assert_backends( + self, assignment='sql', identity='ldap', resource='sql' + ) def config_overrides(self): super(LDAPLimitTests, self).config_overrides() self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='identity', - list_limit=len(default_fixtures.USERS) - 1) + self.config_fixture.config( + group='identity', list_limit=len(default_fixtures.USERS) - 1 + ) def config_files(self): config_files = super(LDAPLimitTests, self).config_files() @@ -2019,8 +2181,13 @@ class 
LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): def load_fixtures(self, fixtures): # Override super impl since need to create group container. super(LDAPIdentity, self).load_fixtures(fixtures) - for obj in [self.project_bar, self.project_baz, self.user_foo, - self.user_two, self.user_badguy]: + for obj in [ + self.project_bar, + self.project_baz, + self.user_foo, + self.user_two, + self.user_badguy, + ]: obj.setdefault('enabled', True) def config_files(self): @@ -2030,15 +2197,15 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): def config_overrides(self): super(LDAPIdentityEnabledEmulation, self).config_overrides() - self.config_fixture.config(group='ldap', - user_enabled_emulation=True) + self.config_fixture.config(group='ldap', user_enabled_emulation=True) def test_project_crud(self): # NOTE(topol): LDAPIdentityEnabledEmulation will create an # enabled key in the project dictionary so this # method override handles this side-effect project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project = PROVIDERS.resource_api.create_project(project['id'], project) project_ref = PROVIDERS.resource_api.get_project(project['id']) @@ -2055,28 +2222,33 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): self.assertDictEqual(project, project_ref) PROVIDERS.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project['id'], + ) def test_user_auth_emulated(self): driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.user.enabled_emulation_dn = 'cn=test,dc=test' with self.make_request(): PROVIDERS.identity_api.authenticate( - user_id=self.user_foo['id'], - password=self.user_foo['password']) + 
user_id=self.user_foo['id'], password=self.user_foo['password'] + ) def test_user_enable_attribute_mask(self): self.skip_test_overrides( - "Enabled emulation conflicts with enabled mask") + "Enabled emulation conflicts with enabled mask" + ) def test_user_enabled_use_group_config(self): # Establish enabled-emulation group name to later query its members group_name = 'enabled_users' driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) group_dn = 'cn=%s,%s' % (group_name, driver.group.tree_dn) self.config_fixture.config( @@ -2085,13 +2257,15 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): user_enabled_emulation_dn=group_dn, group_name_attribute='cn', group_member_attribute='uniqueMember', - group_objectclass='groupOfUniqueNames') + group_objectclass='groupOfUniqueNames', + ) self.ldapdb.clear() self.load_backends() # Create a user and ensure they are enabled. - user1 = unit.new_user_ref(enabled=True, - domain_id=CONF.identity.default_domain_id) + user1 = unit.new_user_ref( + enabled=True, domain_id=CONF.identity.default_domain_id + ) user_ref = PROVIDERS.identity_api.create_user(user1) self.assertIs(True, user_ref['enabled']) @@ -2101,15 +2275,18 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): # Ensure state matches the group config group_ref = PROVIDERS.identity_api.get_group_by_name( - group_name, CONF.identity.default_domain_id) + group_name, CONF.identity.default_domain_id + ) PROVIDERS.identity_api.check_user_in_group( - user_ref['id'], group_ref['id']) + user_ref['id'], group_ref['id'] + ) def test_user_enabled_use_group_config_with_ids(self): # Establish enabled-emulation group name to later query its members group_name = 'enabled_users' driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) group_dn = 'cn=%s,%s' % (group_name, driver.group.tree_dn) 
self.config_fixture.config( @@ -2119,13 +2296,15 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): group_name_attribute='cn', group_member_attribute='memberUid', group_members_are_ids=True, - group_objectclass='posixGroup') + group_objectclass='posixGroup', + ) self.ldapdb.clear() self.load_backends() # Create a user and ensure they are enabled. - user1 = unit.new_user_ref(enabled=True, - domain_id=CONF.identity.default_domain_id) + user1 = unit.new_user_ref( + enabled=True, domain_id=CONF.identity.default_domain_id + ) user_ref = PROVIDERS.identity_api.create_user(user1) self.assertIs(True, user_ref['enabled']) @@ -2135,20 +2314,26 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): # Ensure state matches the group config group_ref = PROVIDERS.identity_api.get_group_by_name( - group_name, CONF.identity.default_domain_id) + group_name, CONF.identity.default_domain_id + ) PROVIDERS.identity_api.check_user_in_group( - user_ref['id'], group_ref['id']) + user_ref['id'], group_ref['id'] + ) def test_user_enabled_invert(self): - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_default='False') + self.config_fixture.config( + group='ldap', + user_enabled_invert=True, + user_enabled_default='False', + ) self.ldapdb.clear() self.load_backends() user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user2 = self.new_user_ref(enabled=False, - domain_id=CONF.identity.default_domain_id) + user2 = self.new_user_ref( + enabled=False, domain_id=CONF.identity.default_domain_id + ) user3 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) @@ -2190,23 +2375,25 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): self.assertIs(True, user_ref['enabled']) def test_user_enabled_invert_default_str_value(self): - self.skip_test_overrides( - "N/A: Covered by test_user_enabled_invert") + self.skip_test_overrides("N/A: Covered by test_user_enabled_invert") 
@mock.patch.object(common_ldap.BaseLdap, '_ldap_get') def test_user_enabled_attribute_handles_utf8(self, mock_ldap_get): # Since user_enabled_emulation is enabled in this test, this test will # fail since it's using user_enabled_invert. - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_attribute='passwordisexpired') + self.config_fixture.config( + group='ldap', + user_enabled_invert=True, + user_enabled_attribute='passwordisexpired', + ) mock_ldap_get.return_value = ( u'uid=123456789,c=us,ou=our_ldap,o=acme.com', { 'uid': [123456789], 'mail': [u'shaun@acme.com'], 'passwordisexpired': [u'false'], - 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com'] - } + 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com'], + }, ) user_api = identity.backends.ldap.UserApi(CONF) @@ -2219,7 +2406,8 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): object_id = uuid.uuid4().hex driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) # driver.user is the EnabledEmuMixIn implementation used for this test. mixin_impl = driver.user @@ -2235,12 +2423,16 @@ class LDAPIdentityEnabledEmulation(LDAPIdentity, unit.TestCase): # The filter, which _is_id_enabled is going to build, contains the # tree_dn, which better be escaped in this case. exp_filter = '(%s=%s=%s,%s)' % ( - mixin_impl.member_attribute, mixin_impl.id_attr, object_id, - sample_dn_filter_esc) + mixin_impl.member_attribute, + mixin_impl.id_attr, + object_id, + sample_dn_filter_esc, + ) with mixin_impl.get_connection() as conn: m = self.useFixture( - fixtures.MockPatchObject(conn, 'search_s')).mock + fixtures.MockPatchObject(conn, 'search_s') + ).mock mixin_impl._is_id_enabled(object_id, conn) # The 3rd argument is the DN. 
self.assertEqual(exp_filter, m.call_args[0][2]) @@ -2254,8 +2446,11 @@ class LDAPPosixGroupsTest(LDAPTestSetup, unit.TestCase): def config_overrides(self): super(LDAPPosixGroupsTest, self).config_overrides() self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='ldap', group_members_are_ids=True, - group_member_attribute='memberUID') + self.config_fixture.config( + group='ldap', + group_members_are_ids=True, + group_member_attribute='memberUID', + ) def config_files(self): config_files = super(LDAPPosixGroupsTest, self).config_files() @@ -2300,7 +2495,8 @@ class LDAPPosixGroupsTest(LDAPTestSetup, unit.TestCase): class LdapIdentityWithMapping( - BaseLDAPIdentity, unit.SQLDriverOverrides, unit.TestCase): + BaseLDAPIdentity, unit.SQLDriverOverrides, unit.TestCase +): """Class to test mapping of default LDAP backend. The default configuration is not to enable mapping when using a single @@ -2325,8 +2521,9 @@ class LdapIdentityWithMapping( def config_overrides(self): super(LdapIdentityWithMapping, self).config_overrides() self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=False + ) def test_dynamic_mapping_build(self): """Test to ensure entities not create via controller are mapped. 
@@ -2350,12 +2547,16 @@ class LdapIdentityWithMapping( PROVIDERS.id_mapping_api.purge_mappings({'public_id': user2['id']}) # We should no longer be able to get these users via their old IDs - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, - user1['id']) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, - user2['id']) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.get_user, + user1['id'], + ) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.get_user, + user2['id'], + ) # Now enumerate all users...this should re-build the mapping, and # we should be able to find the users via their original public IDs. @@ -2368,7 +2569,8 @@ class LdapIdentityWithMapping( default_domain = unit.new_domain_ref( description=u'The default domain', id=CONF.identity.default_domain_id, - name=u'Default') + name=u'Default', + ) self.assertEqual([default_domain], domains) @@ -2383,25 +2585,29 @@ class BaseMultiLDAPandSQLIdentity(object): users = {} users['user0'] = unit.create_user( - PROVIDERS.identity_api, - self.domain_default['id']) + PROVIDERS.identity_api, self.domain_default['id'] + ) PROVIDERS.assignment_api.create_grant( user_id=users['user0']['id'], domain_id=self.domain_default['id'], - role_id=self.role_member['id']) + role_id=self.role_member['id'], + ) for x in range(1, self.domain_count): users['user%s' % x] = unit.create_user( - PROVIDERS.identity_api, - self.domains['domain%s' % x]['id']) + PROVIDERS.identity_api, self.domains['domain%s' % x]['id'] + ) PROVIDERS.assignment_api.create_grant( user_id=users['user%s' % x]['id'], domain_id=self.domains['domain%s' % x]['id'], - role_id=self.role_member['id']) + role_id=self.role_member['id'], + ) # So how many new id mappings should have been created? One for each # user created in a domain that is using the non default driver.. 
- self.assertEqual(initial_mappings + self.domain_specific_count, - len(mapping_sql.list_id_mappings())) + self.assertEqual( + initial_mappings + self.domain_specific_count, + len(mapping_sql.list_id_mappings()), + ) return users @@ -2415,13 +2621,14 @@ class BaseMultiLDAPandSQLIdentity(object): """ driver = PROVIDERS.identity_api._select_identity_driver(domain_id) unused, unused, entity_id = ( - PROVIDERS.identity_api._get_domain_driver_and_entity_id( - user['id'])) + PROVIDERS.identity_api._get_domain_driver_and_entity_id(user['id']) + ) if expected_status == http.client.OK: ref = driver.get_user(entity_id) ref = PROVIDERS.identity_api._set_domain_id_and_mapping( - ref, domain_id, driver, map.EntityType.USER) + ref, domain_id, driver, map.EntityType.USER + ) user = user.copy() del user['password'] self.assertDictEqual(user, ref) @@ -2439,17 +2646,18 @@ class BaseMultiLDAPandSQLIdentity(object): def create_domain(domain): try: ref = PROVIDERS.resource_api.create_domain( - domain['id'], domain) + domain['id'], domain + ) except exception.Conflict: - ref = ( - PROVIDERS.resource_api.get_domain_by_name(domain['name'])) + ref = PROVIDERS.resource_api.get_domain_by_name(domain['name']) return ref self.domains = {} for x in range(1, self.domain_count): domain = 'domain%s' % x self.domains[domain] = create_domain( - {'id': uuid.uuid4().hex, 'name': domain}) + {'id': uuid.uuid4().hex, 'name': domain} + ) def test_authenticate_to_each_domain(self): """Test that a user in each domain can authenticate.""" @@ -2459,12 +2667,16 @@ class BaseMultiLDAPandSQLIdentity(object): user = 'user%s' % user_num with self.make_request(): PROVIDERS.identity_api.authenticate( - user_id=users[user]['id'], - password=users[user]['password']) + user_id=users[user]['id'], password=users[user]['password'] + ) -class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, - unit.TestCase, BaseMultiLDAPandSQLIdentity): +class MultiLDAPandSQLIdentity( + BaseLDAPIdentity, + 
unit.SQLDriverOverrides, + unit.TestCase, + BaseMultiLDAPandSQLIdentity, +): """Class to test common SQL plus individual LDAP backends. We define a set of domains and domain-specific backends: @@ -2487,7 +2699,8 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, self.domain_count = 5 self.domain_specific_count = 3 PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) self.setup_initial_domains() # All initial test data setup complete, time to switch on support @@ -2497,15 +2710,17 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, super(MultiLDAPandSQLIdentity, self).load_fixtures(fixtures) def assert_backends(self): - _assert_backends(self, - assignment='sql', - identity={ - None: 'sql', - self.domain_default['id']: 'ldap', - self.domains['domain1']['id']: 'ldap', - self.domains['domain2']['id']: 'ldap', - }, - resource='sql') + _assert_backends( + self, + assignment='sql', + identity={ + None: 'sql', + self.domain_default['id']: 'ldap', + self.domains['domain1']['id']: 'ldap', + self.domains['domain2']['id']: 'ldap', + }, + resource='sql', + ) def config_overrides(self): super(MultiLDAPandSQLIdentity, self).config_overrides() @@ -2524,11 +2739,14 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, """ self.config_fixture.config( - group='identity', domain_specific_drivers_enabled=True, + group='identity', + domain_specific_drivers_enabled=True, domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap', - list_limit=1000) - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) + list_limit=1000, + ) + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=False + ) def get_config(self, domain_id): # Get the config for this domain, will return CONF @@ -2543,11 +2761,15 @@ class 
MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, # than in the standard test. users = PROVIDERS.identity_api.list_users( domain_scope=self._set_domain_scope( - CONF.identity.default_domain_id)) + CONF.identity.default_domain_id + ) + ) self.assertEqual(len(default_fixtures.USERS) + 1, len(users)) user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 'user_%s' % user['name'])['id'] - for user in default_fixtures.USERS) + expected_user_ids = set( + getattr(self, 'user_%s' % user['name'])['id'] + for user in default_fixtures.USERS + ) expected_user_ids.add(_users['user0']['id']) for user_ref in users: self.assertNotIn('password', user_ref) @@ -2559,8 +2781,8 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, # is considered be disabled hints = driver_hints.Hints() PROVIDERS.identity_api.list_users( - domain_scope=self.domains['domain2']['id'], - hints=hints) + domain_scope=self.domains['domain2']['id'], hints=hints + ) # since list_limit is not specified in keystone.domain2.conf, it should # take the default, which is 1000 self.assertTrue(ldap_get_all.called) @@ -2574,8 +2796,8 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, # is considered to be disabled hints = driver_hints.Hints() PROVIDERS.identity_api.list_users( - domain_scope=self.domains['domain1']['id'], - hints=hints) + domain_scope=self.domains['domain1']['id'], hints=hints + ) # this should have the list_limit set in Keystone.domain1.conf, which # is 101 self.assertTrue(ldap_get_all.called) @@ -2600,45 +2822,58 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, # driver, but won't find it via any other domain driver check_user = self.check_user - check_user(users['user0'], - self.domain_default['id'], http.client.OK) - for domain in [self.domains['domain1']['id'], - self.domains['domain2']['id'], - self.domains['domain3']['id'], - self.domains['domain4']['id']]: + 
check_user(users['user0'], self.domain_default['id'], http.client.OK) + for domain in [ + self.domains['domain1']['id'], + self.domains['domain2']['id'], + self.domains['domain3']['id'], + self.domains['domain4']['id'], + ]: check_user(users['user0'], domain, exception.UserNotFound) - check_user(users['user1'], self.domains['domain1']['id'], - http.client.OK) - for domain in [self.domain_default['id'], - self.domains['domain2']['id'], - self.domains['domain3']['id'], - self.domains['domain4']['id']]: + check_user( + users['user1'], self.domains['domain1']['id'], http.client.OK + ) + for domain in [ + self.domain_default['id'], + self.domains['domain2']['id'], + self.domains['domain3']['id'], + self.domains['domain4']['id'], + ]: check_user(users['user1'], domain, exception.UserNotFound) - check_user(users['user2'], self.domains['domain2']['id'], - http.client.OK) - for domain in [self.domain_default['id'], - self.domains['domain1']['id'], - self.domains['domain3']['id'], - self.domains['domain4']['id']]: + check_user( + users['user2'], self.domains['domain2']['id'], http.client.OK + ) + for domain in [ + self.domain_default['id'], + self.domains['domain1']['id'], + self.domains['domain3']['id'], + self.domains['domain4']['id'], + ]: check_user(users['user2'], domain, exception.UserNotFound) # domain3 and domain4 share the same backend, so you should be # able to see user3 and user4 from either. 
- check_user(users['user3'], self.domains['domain3']['id'], - http.client.OK) - check_user(users['user3'], self.domains['domain4']['id'], - http.client.OK) - check_user(users['user4'], self.domains['domain3']['id'], - http.client.OK) - check_user(users['user4'], self.domains['domain4']['id'], - http.client.OK) + check_user( + users['user3'], self.domains['domain3']['id'], http.client.OK + ) + check_user( + users['user3'], self.domains['domain4']['id'], http.client.OK + ) + check_user( + users['user4'], self.domains['domain3']['id'], http.client.OK + ) + check_user( + users['user4'], self.domains['domain4']['id'], http.client.OK + ) - for domain in [self.domain_default['id'], - self.domains['domain1']['id'], - self.domains['domain2']['id']]: + for domain in [ + self.domain_default['id'], + self.domains['domain1']['id'], + self.domains['domain2']['id'], + ]: check_user(users['user3'], domain, exception.UserNotFound) check_user(users['user4'], domain, exception.UserNotFound) @@ -2652,20 +2887,25 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, # # The listing of the default domain is already handled in the # test_lists_users() method. - for domain in [self.domains['domain1']['id'], - self.domains['domain2']['id'], - self.domains['domain4']['id']]: + for domain in [ + self.domains['domain1']['id'], + self.domains['domain2']['id'], + self.domains['domain4']['id'], + ]: self.assertThat( PROVIDERS.identity_api.list_users(domain_scope=domain), - matchers.HasLength(1)) + matchers.HasLength(1), + ) # domain3 had a user created before we switched on # multiple backends, plus one created afterwards - and its # backend has not changed - so we should find two. self.assertThat( PROVIDERS.identity_api.list_users( - domain_scope=self.domains['domain3']['id']), - matchers.HasLength(1)) + domain_scope=self.domains['domain3']['id'] + ), + matchers.HasLength(1), + ) def test_existing_uuids_work(self): """Test that 'uni-domain' created IDs still work. 
@@ -2675,14 +2915,14 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, """ userA = unit.create_user( - PROVIDERS.identity_api, - self.domain_default['id']) + PROVIDERS.identity_api, self.domain_default['id'] + ) userB = unit.create_user( - PROVIDERS.identity_api, - self.domains['domain1']['id']) + PROVIDERS.identity_api, self.domains['domain1']['id'] + ) userC = unit.create_user( - PROVIDERS.identity_api, - self.domains['domain3']['id']) + PROVIDERS.identity_api, self.domains['domain3']['id'] + ) PROVIDERS.identity_api.get_user(userA['id']) PROVIDERS.identity_api.get_user(userB['id']) PROVIDERS.identity_api.get_user(userC['id']) @@ -2702,22 +2942,32 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, self.load_backends() # Execute any command to trigger the lazy loading of domain configs PROVIDERS.identity_api.list_users( - domain_scope=self.domains['domain1']['id']) + domain_scope=self.domains['domain1']['id'] + ) # ...and now check the domain configs have been set up self.assertIn('default', PROVIDERS.identity_api.domain_configs) - self.assertIn(self.domains['domain1']['id'], - PROVIDERS.identity_api.domain_configs) - self.assertIn(self.domains['domain2']['id'], - PROVIDERS.identity_api.domain_configs) - self.assertNotIn(self.domains['domain3']['id'], - PROVIDERS.identity_api.domain_configs) - self.assertNotIn(self.domains['domain4']['id'], - PROVIDERS.identity_api.domain_configs) + self.assertIn( + self.domains['domain1']['id'], + PROVIDERS.identity_api.domain_configs, + ) + self.assertIn( + self.domains['domain2']['id'], + PROVIDERS.identity_api.domain_configs, + ) + self.assertNotIn( + self.domains['domain3']['id'], + PROVIDERS.identity_api.domain_configs, + ) + self.assertNotIn( + self.domains['domain4']['id'], + PROVIDERS.identity_api.domain_configs, + ) # Finally check that a domain specific config contains items from both # the primary config and the domain specific config conf = 
PROVIDERS.identity_api.domain_configs.get_domain_conf( - self.domains['domain1']['id']) + self.domains['domain1']['id'] + ) # This should now be false, as is the default, since this is not # set in the standard primary config file self.assertFalse(conf.identity.domain_specific_drivers_enabled) @@ -2733,29 +2983,37 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, self.assertDictEqual(project, project_ref) PROVIDERS.assignment_api.create_grant( - user_id=self.user_foo['id'], project_id=project['id'], - role_id=self.role_member['id'] + user_id=self.user_foo['id'], + project_id=project['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.delete_grant( - user_id=self.user_foo['id'], project_id=project['id'], - role_id=self.role_member['id'] + user_id=self.user_foo['id'], + project_id=project['id'], + role_id=self.role_member['id'], ) domain['enabled'] = False PROVIDERS.resource_api.update_domain(domain['id'], domain) PROVIDERS.resource_api.delete_domain(domain['id']) - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain, - domain['id']) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain, + domain['id'], + ) def test_user_enabled_ignored_disable_error(self): # Override. - self.skip_test_overrides("Doesn't apply since LDAP config has no " - "affect on the SQL identity backend.") + self.skip_test_overrides( + "Doesn't apply since LDAP config has no " + "affect on the SQL identity backend." + ) def test_group_enabled_ignored_disable_error(self): # Override. - self.skip_test_overrides("Doesn't apply since LDAP config has no " - "affect on the SQL identity backend.") + self.skip_test_overrides( + "Doesn't apply since LDAP config has no " + "affect on the SQL identity backend." 
+ ) def test_list_role_assignments_filtered_by_role(self): # Domain roles are supported by the SQL Assignment backend @@ -2770,8 +3028,9 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, def test_list_role_assignment_by_user_with_domain_group_roles(self): # With multi LDAP this method should work, so override the override # from BaseLDAPIdentity - super(BaseLDAPIdentity, self).\ - test_list_role_assignment_by_user_with_domain_group_roles() + super( + BaseLDAPIdentity, self + ).test_list_role_assignment_by_user_with_domain_group_roles() def test_list_role_assignment_using_sourced_groups_with_domains(self): # With SQL Assignment this method should work, so override the override @@ -2782,14 +3041,16 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, def test_create_project_with_domain_id_and_without_parent_id(self): # With multi LDAP this method should work, so override the override # from BaseLDAPIdentity - super(BaseLDAPIdentity, self).\ - test_create_project_with_domain_id_and_without_parent_id() + super( + BaseLDAPIdentity, self + ).test_create_project_with_domain_id_and_without_parent_id() def test_create_project_with_domain_id_mismatch_to_parent_domain(self): # With multi LDAP this method should work, so override the override # from BaseLDAPIdentity - super(BaseLDAPIdentity, self).\ - test_create_project_with_domain_id_mismatch_to_parent_domain() + super( + BaseLDAPIdentity, self + ).test_create_project_with_domain_id_mismatch_to_parent_domain() def test_remove_foreign_assignments_when_deleting_a_domain(self): # With multi LDAP this method should work, so override the override @@ -2799,8 +3060,9 @@ class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, @mock.patch.object(ldap_identity.Identity, 'unset_default_project_id') @mock.patch.object(sql_identity.Identity, 'unset_default_project_id') - def test_delete_project_unset_project_ids_for_all_backends(self, sql_mock, - ldap_mock): + def 
test_delete_project_unset_project_ids_for_all_backends( + self, sql_mock, ldap_mock + ): ldap_mock.side_effect = exception.Forbidden project = unit.new_project_ref( domain_id=CONF.identity.default_domain_id @@ -2821,42 +3083,49 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): """ def assert_backends(self): - _assert_backends(self, - assignment='sql', - identity={ - None: 'sql', - self.domain_default['id']: 'ldap', - self.domains['domain1']['id']: 'ldap', - self.domains['domain2']['id']: 'ldap', - }, - resource='sql') + _assert_backends( + self, + assignment='sql', + identity={ + None: 'sql', + self.domain_default['id']: 'ldap', + self.domains['domain1']['id']: 'ldap', + self.domains['domain2']['id']: 'ldap', + }, + resource='sql', + ) def enable_multi_domain(self): # The values below are the same as in the domain_configs_multi_ldap # directory of test config_files. default_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap'} + 'ldap': { + 'url': 'fake://memory', + 'user': 'cn=Admin', + 'password': 'password', + 'suffix': 'cn=example,cn=com', + }, + 'identity': {'driver': 'ldap'}, } domain1_config = { - 'ldap': {'url': 'fake://memory1', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap', - 'list_limit': 101} + 'ldap': { + 'url': 'fake://memory1', + 'user': 'cn=Admin', + 'password': 'password', + 'suffix': 'cn=example,cn=com', + }, + 'identity': {'driver': 'ldap', 'list_limit': 101}, } domain2_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=myroot,cn=com', - 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org', - 'user_tree_dn': 'ou=Users,dc=myroot,dc=org'}, - 'identity': {'driver': 'ldap'} + 'ldap': { + 'url': 'fake://memory', + 'user': 'cn=Admin', + 'password': 'password', + 'suffix': 'cn=myroot,cn=com', + 
'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org', + 'user_tree_dn': 'ou=Users,dc=myroot,dc=org', + }, + 'identity': {'driver': 'ldap'}, } PROVIDERS.domain_config_api.create_config( @@ -2870,11 +3139,14 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): ) self.config_fixture.config( - group='identity', domain_specific_drivers_enabled=True, + group='identity', + domain_specific_drivers_enabled=True, domain_configurations_from_database=True, - list_limit=1000) - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) + list_limit=1000, + ) + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=False + ) def test_domain_config_has_no_impact_if_database_support_disabled(self): """Ensure database domain configs have no effect if disabled. @@ -2884,19 +3156,21 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): """ self.config_fixture.config( - group='identity', domain_configurations_from_database=False) + group='identity', domain_configurations_from_database=False + ) self.load_backends() new_config = {'ldap': {'url': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) + CONF.identity.default_domain_id, new_config + ) # Trigger the identity backend to initialise any domain specific # configurations PROVIDERS.identity_api.list_users() # Check that the new config has not been passed to the driver for # the default domain. - default_config = ( - PROVIDERS.identity_api.domain_configs.get_domain_conf( - CONF.identity.default_domain_id)) + default_config = PROVIDERS.identity_api.domain_configs.get_domain_conf( + CONF.identity.default_domain_id + ) self.assertEqual(CONF.ldap.url, default_config.ldap.url) def test_reloading_domain_config(self): @@ -2907,20 +3181,27 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): # current settings. 
new_config = { 'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': 'ldap'}} + 'identity': {'driver': 'ldap'}, + } PROVIDERS.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) - default_config = ( - domain_cfgs.get_domain_conf(CONF.identity.default_domain_id)) + CONF.identity.default_domain_id, new_config + ) + default_config = domain_cfgs.get_domain_conf( + CONF.identity.default_domain_id + ) self.assertEqual(new_config['ldap']['url'], default_config.ldap.url) # Ensure updating is also honored updated_config = {'url': uuid.uuid4().hex} PROVIDERS.domain_config_api.update_config( - CONF.identity.default_domain_id, updated_config, - group='ldap', option='url') - default_config = ( - domain_cfgs.get_domain_conf(CONF.identity.default_domain_id)) + CONF.identity.default_domain_id, + updated_config, + group='ldap', + option='url', + ) + default_config = domain_cfgs.get_domain_conf( + CONF.identity.default_domain_id + ) self.assertEqual(updated_config['url'], default_config.ldap.url) # ...and finally ensure delete causes the driver to get the standard @@ -2928,42 +3209,51 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): PROVIDERS.domain_config_api.delete_config( CONF.identity.default_domain_id ) - default_config = ( - domain_cfgs.get_domain_conf(CONF.identity.default_domain_id)) + default_config = domain_cfgs.get_domain_conf( + CONF.identity.default_domain_id + ) self.assertEqual(CONF.ldap.url, default_config.ldap.url) def test_setting_multiple_sql_driver_raises_exception(self): """Ensure setting multiple domain specific sql drivers is prevented.""" new_config = {'identity': {'driver': 'sql'}} PROVIDERS.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) + CONF.identity.default_domain_id, new_config + ) PROVIDERS.identity_api.domain_configs.get_domain_conf( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) PROVIDERS.domain_config_api.create_config( 
self.domains['domain1']['id'], new_config ) self.assertRaises( exception.MultipleSQLDriversInConfig, PROVIDERS.identity_api.domain_configs.get_domain_conf, - self.domains['domain1']['id'] + self.domains['domain1']['id'], ) def test_same_domain_gets_sql_driver(self): """Ensure we can set an SQL driver if we have had it before.""" new_config = {'identity': {'driver': 'sql'}} PROVIDERS.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) + CONF.identity.default_domain_id, new_config + ) PROVIDERS.identity_api.domain_configs.get_domain_conf( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) # By using a slightly different config, we cause the driver to be # reloaded...and hence check if we can reuse the sql driver - new_config = {'identity': {'driver': 'sql'}, - 'ldap': {'url': 'fake://memory1'}} + new_config = { + 'identity': {'driver': 'sql'}, + 'ldap': {'url': 'fake://memory1'}, + } PROVIDERS.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) + CONF.identity.default_domain_id, new_config + ) PROVIDERS.identity_api.domain_configs.get_domain_conf( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) def test_delete_domain_clears_sql_registration(self): """Ensure registration is deleted when a domain is deleted.""" @@ -2980,7 +3270,7 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): self.assertRaises( exception.MultipleSQLDriversInConfig, PROVIDERS.identity_api.domain_configs.get_domain_conf, - self.domains['domain1']['id'] + self.domains['domain1']['id'], ) PROVIDERS.domain_config_api.delete_config( self.domains['domain1']['id'] @@ -3014,7 +3304,7 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): self.assertRaises( exception.MultipleSQLDriversInConfig, PROVIDERS.identity_api.domain_configs.get_domain_conf, - self.domains['domain1']['id'] + self.domains['domain1']['id'], ) # Now we delete the domain by using the 
backend driver directly, @@ -3028,19 +3318,24 @@ class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): PROVIDERS.resource_api.driver.delete_project(domain['id']) # Invalidate cache (so we will see the domain has gone) PROVIDERS.resource_api.get_domain.invalidate( - PROVIDERS.resource_api, domain['id']) + PROVIDERS.resource_api, domain['id'] + ) # The registration should now be available PROVIDERS.domain_config_api.create_config( self.domains['domain1']['id'], new_config ) PROVIDERS.identity_api.domain_configs.get_domain_conf( - self.domains['domain1']['id']) + self.domains['domain1']['id'] + ) class DomainSpecificLDAPandSQLIdentity( - BaseLDAPIdentity, unit.SQLDriverOverrides, unit.TestCase, - BaseMultiLDAPandSQLIdentity): + BaseLDAPIdentity, + unit.SQLDriverOverrides, + unit.TestCase, + BaseMultiLDAPandSQLIdentity, +): """Class to test when all domains use specific configs, including SQL. We define a set of domains and domain-specific backends: @@ -3063,7 +3358,8 @@ class DomainSpecificLDAPandSQLIdentity( def load_fixtures(self, fixtures): PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) self.setup_initial_domains() super(DomainSpecificLDAPandSQLIdentity, self).load_fixtures(fixtures) @@ -3076,7 +3372,8 @@ class DomainSpecificLDAPandSQLIdentity( 'default': 'ldap', self.domains['domain1']['id']: 'sql', }, - resource='sql') + resource='sql', + ) def config_overrides(self): super(DomainSpecificLDAPandSQLIdentity, self).config_overrides() @@ -3088,12 +3385,16 @@ class DomainSpecificLDAPandSQLIdentity( # We aren't setting up any initial data ahead of switching to # domain-specific operation, so make the switch straight away. 
self.config_fixture.config( - group='identity', domain_specific_drivers_enabled=True, + group='identity', + domain_specific_drivers_enabled=True, domain_config_dir=( - unit.TESTCONF + '/domain_configs_one_sql_one_ldap')) + unit.TESTCONF + '/domain_configs_one_sql_one_ldap' + ), + ) - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=False + ) def get_config(self, domain_id): # Get the config for this domain, will return CONF @@ -3108,7 +3409,8 @@ class DomainSpecificLDAPandSQLIdentity( # domains and identity, are still not supported self.assertRaises( exception.DomainNotFound, - super(BaseLDAPIdentity, self).test_delete_domain_with_project_api) + super(BaseLDAPIdentity, self).test_delete_domain_with_project_api, + ) def test_list_users(self): _users = self.create_users_across_domains() @@ -3118,11 +3420,15 @@ class DomainSpecificLDAPandSQLIdentity( # than in the standard test. 
users = PROVIDERS.identity_api.list_users( domain_scope=self._set_domain_scope( - CONF.identity.default_domain_id)) + CONF.identity.default_domain_id + ) + ) self.assertEqual(len(default_fixtures.USERS) + 1, len(users)) user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 'user_%s' % user['name'])['id'] - for user in default_fixtures.USERS) + expected_user_ids = set( + getattr(self, 'user_%s' % user['name'])['id'] + for user in default_fixtures.USERS + ) expected_user_ids.add(_users['user0']['id']) for user_ref in users: self.assertNotIn('password', user_ref) @@ -3144,24 +3450,31 @@ class DomainSpecificLDAPandSQLIdentity( # Check that I can read a user with the appropriate domain-selected # driver, but won't find it via any other domain driver - self.check_user(users['user0'], - self.domain_default['id'], http.client.OK) - self.check_user(users['user0'], - self.domains['domain1']['id'], exception.UserNotFound) + self.check_user( + users['user0'], self.domain_default['id'], http.client.OK + ) + self.check_user( + users['user0'], + self.domains['domain1']['id'], + exception.UserNotFound, + ) - self.check_user(users['user1'], - self.domains['domain1']['id'], http.client.OK) - self.check_user(users['user1'], - self.domain_default['id'], - exception.UserNotFound) + self.check_user( + users['user1'], self.domains['domain1']['id'], http.client.OK + ) + self.check_user( + users['user1'], self.domain_default['id'], exception.UserNotFound + ) # Finally, going through the regular manager layer, make sure we # only see the right number of users in the non-default domain. 
self.assertThat( PROVIDERS.identity_api.list_users( - domain_scope=self.domains['domain1']['id']), - matchers.HasLength(1)) + domain_scope=self.domains['domain1']['id'] + ), + matchers.HasLength(1), + ) def test_get_domain_mapping_list_is_used(self): # before get_domain_mapping_list was introduced, it was required to @@ -3169,30 +3482,40 @@ class DomainSpecificLDAPandSQLIdentity( # get_domain_mapping_list solves this problem and should be used # when multiple users are fetched from domain-specific backend. for i in range(5): - unit.create_user(PROVIDERS.identity_api, - domain_id=self.domains['domain1']['id']) + unit.create_user( + PROVIDERS.identity_api, domain_id=self.domains['domain1']['id'] + ) - with mock.patch.multiple(PROVIDERS.id_mapping_api, - get_domain_mapping_list=mock.DEFAULT, - get_id_mapping=mock.DEFAULT) as mocked: + with mock.patch.multiple( + PROVIDERS.id_mapping_api, + get_domain_mapping_list=mock.DEFAULT, + get_id_mapping=mock.DEFAULT, + ) as mocked: PROVIDERS.identity_api.list_users( - domain_scope=self.domains['domain1']['id']) + domain_scope=self.domains['domain1']['id'] + ) mocked['get_domain_mapping_list'].assert_called() mocked['get_id_mapping'].assert_not_called() def test_user_id_comma(self): - self.skip_test_overrides('Only valid if it is guaranteed to be ' - 'talking to the fakeldap backend') + self.skip_test_overrides( + 'Only valid if it is guaranteed to be ' + 'talking to the fakeldap backend' + ) def test_user_enabled_ignored_disable_error(self): # Override. - self.skip_test_overrides("Doesn't apply since LDAP config has no " - "affect on the SQL identity backend.") + self.skip_test_overrides( + "Doesn't apply since LDAP config has no " + "affect on the SQL identity backend." + ) def test_group_enabled_ignored_disable_error(self): # Override. 
- self.skip_test_overrides("Doesn't apply since LDAP config has no " - "affect on the SQL identity backend.") + self.skip_test_overrides( + "Doesn't apply since LDAP config has no " + "affect on the SQL identity backend." + ) def test_list_role_assignments_filtered_by_role(self): # Domain roles are supported by the SQL Assignment backend @@ -3204,7 +3527,8 @@ class DomainSpecificLDAPandSQLIdentity( # domains and identity, are still not supported self.assertRaises( exception.DomainNotFound, - super(BaseLDAPIdentity, self).test_delete_domain_with_project_api) + super(BaseLDAPIdentity, self).test_delete_domain_with_project_api, + ) def test_create_project_with_domain_id_and_without_parent_id(self): # With restricted multi LDAP, tests that don't use identity, but do @@ -3222,13 +3546,15 @@ class DomainSpecificLDAPandSQLIdentity( # With this restricted multi LDAP class, tests that use multiple # domains and identity, are still not supported self.skip_test_overrides( - 'Restricted multi LDAP class does not support multiple domains') + 'Restricted multi LDAP class does not support multiple domains' + ) def test_list_limit_for_domains(self): # With this restricted multi LDAP class, tests that use multiple # domains and identity, are still not supported self.skip_test_overrides( - 'Restricted multi LDAP class does not support multiple domains') + 'Restricted multi LDAP class does not support multiple domains' + ) class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity): @@ -3248,10 +3574,9 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity): DOMAIN_SPECIFIC_COUNT = 1 def assert_backends(self): - _assert_backends(self, - assignment='sql', - identity='ldap', - resource='sql') + _assert_backends( + self, assignment='sql', identity='ldap', resource='sql' + ) def config_overrides(self): super(DomainSpecificSQLIdentity, self).config_overrides() @@ -3260,15 +3585,19 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity): # We aren't setting 
up any initial data ahead of switching to # domain-specific operation, so make the switch straight away. self.config_fixture.config( - group='identity', domain_specific_drivers_enabled=True, + group='identity', + domain_specific_drivers_enabled=True, domain_config_dir=( - unit.TESTCONF + '/domain_configs_default_ldap_one_sql')) + unit.TESTCONF + '/domain_configs_default_ldap_one_sql' + ), + ) # Part of the testing counts how many new mappings get created as # we create users, so ensure we are NOT using mapping for the default # LDAP domain so this doesn't confuse the calculation. - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=True) + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=True + ) def get_config(self, domain_id): if domain_id == CONF.identity.default_domain_id: @@ -3286,7 +3615,8 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity): self.load_backends() # Make any identity call to initiate the lazy loading of configs PROVIDERS.identity_api.list_users( - domain_scope=CONF.identity.default_domain_id) + domain_scope=CONF.identity.default_domain_id + ) self.assertIsNotNone(self.get_config(self.domains['domain1']['id'])) # Now re-initialize, but with sql as the identity driver @@ -3295,9 +3625,11 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity): self.load_backends() # Make any identity call to initiate the lazy loading of configs, which # should fail since we would now have two sql drivers. 
- self.assertRaises(exception.MultipleSQLDriversInConfig, - PROVIDERS.identity_api.list_users, - domain_scope=CONF.identity.default_domain_id) + self.assertRaises( + exception.MultipleSQLDriversInConfig, + PROVIDERS.identity_api.list_users, + domain_scope=CONF.identity.default_domain_id, + ) def test_multiple_sql_specific_drivers_fails(self): self.config_fixture.config(group='identity', driver='ldap') @@ -3308,7 +3640,8 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity): self.setup_initial_domains() # Make any identity call to initiate the lazy loading of configs PROVIDERS.identity_api.list_users( - domain_scope=CONF.identity.default_domain_id) + domain_scope=CONF.identity.default_domain_id + ) # This will only load domain1, since the domain2 config file is # not stored in the same location self.assertIsNotNone(self.get_config(self.domains['domain1']['id'])) @@ -3319,13 +3652,18 @@ class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity): exception.MultipleSQLDriversInConfig, PROVIDERS.identity_api.domain_configs._load_config_from_file, PROVIDERS.resource_api, - [unit.TESTCONF + '/domain_configs_one_extra_sql/' + - 'keystone.domain2.conf'], - 'domain2') + [ + unit.TESTCONF + + '/domain_configs_one_extra_sql/' + + 'keystone.domain2.conf' + ], + 'domain2', + ) -class LdapFilterTests(identity_tests.FilterTests, LDAPTestSetup, - unit.TestCase): +class LdapFilterTests( + identity_tests.FilterTests, LDAPTestSetup, unit.TestCase +): def assert_backends(self): _assert_backends(self, identity='ldap') @@ -3377,7 +3715,8 @@ class LDAPMatchingRuleInChainTests(LDAPTestSetup, unit.TestCase): url='fake://memory', chase_referrals=False, group_tree_dn='cn=UserGroups,cn=example,cn=com', - query_scope='one') + query_scope='one', + ) def config_files(self): config_files = super(LDAPMatchingRuleInChainTests, self).config_files() diff --git a/keystone/tests/unit/test_backend_ldap_pool.py b/keystone/tests/unit/test_backend_ldap_pool.py index 
1c4b198045..5f5c22a030 100644 --- a/keystone/tests/unit/test_backend_ldap_pool.py +++ b/keystone/tests/unit/test_backend_ldap_pool.py @@ -48,37 +48,42 @@ class LdapPoolCommonTestMixin(object): @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'connect') @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'simple_bind_s') - def test_handler_with_use_pool_not_enabled(self, bind_method, - connect_method): + def test_handler_with_use_pool_not_enabled( + self, bind_method, connect_method + ): self.config_fixture.config(group='ldap', use_pool=False) self.config_fixture.config(group='ldap', use_auth_pool=True) self.cleanup_pools() user_api = ldap.UserApi(CONF) - handler = user_api.get_connection(user=None, password=None, - end_user_auth=True) + handler = user_api.get_connection( + user=None, password=None, end_user_auth=True + ) # use_auth_pool flag does not matter when use_pool is False # still handler is non pool version self.assertIsInstance(handler.conn, common_ldap.PythonLDAPHandler) @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'connect') @mock.patch.object(common_ldap.KeystoneLDAPHandler, 'simple_bind_s') - def test_handler_with_end_user_auth_use_pool_not_enabled(self, bind_method, - connect_method): + def test_handler_with_end_user_auth_use_pool_not_enabled( + self, bind_method, connect_method + ): # by default use_pool is enabled in test pool config # now disabling use_auth_pool flag to test handler instance self.config_fixture.config(group='ldap', use_auth_pool=False) self.cleanup_pools() user_api = ldap.UserApi(CONF) - handler = user_api.get_connection(user=None, password=None, - end_user_auth=True) + handler = user_api.get_connection( + user=None, password=None, end_user_auth=True + ) self.assertIsInstance(handler.conn, common_ldap.PythonLDAPHandler) # For end_user_auth case, flag should not be false otherwise # it will use, admin connections ldap pool - handler = user_api.get_connection(user=None, password=None, - end_user_auth=False) + handler = 
user_api.get_connection( + user=None, password=None, end_user_auth=False + ) self.assertIsInstance(handler.conn, common_ldap.PooledLDAPHandler) def test_pool_size_set(self): @@ -107,8 +112,9 @@ class LdapPoolCommonTestMixin(object): def test_pool_timeout_set(self): # get related connection manager instance ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.pool_connection_timeout, - ldappool_cm.timeout) + self.assertEqual( + CONF.ldap.pool_connection_timeout, ldappool_cm.timeout + ) def test_pool_use_pool_set(self): # get related connection manager instance @@ -118,8 +124,9 @@ class LdapPoolCommonTestMixin(object): def test_pool_connection_lifetime_set(self): # get related connection manager instance ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.pool_connection_lifetime, - ldappool_cm.max_lifetime) + self.assertEqual( + CONF.ldap.pool_connection_lifetime, ldappool_cm.max_lifetime + ) def test_max_connection_error_raised(self): @@ -137,8 +144,9 @@ class LdapPoolCommonTestMixin(object): _.unbind_s() self.fail() except Exception as ex: - self.assertIsInstance(ex, - ldappool.MaxConnectionReachedError) + self.assertIsInstance( + ex, ldappool.MaxConnectionReachedError + ) ldappool_cm.size = CONF.ldap.pool_size def test_pool_size_expands_correctly(self): @@ -190,8 +198,8 @@ class LdapPoolCommonTestMixin(object): # change with self.make_request(): user_ref = PROVIDERS.identity_api.authenticate( - user_id=self.user_sna['id'], - password=self.user_sna['password']) + user_id=self.user_sna['id'], password=self.user_sna['password'] + ) self.user_sna.pop('password') self.user_sna['enabled'] = True @@ -205,8 +213,8 @@ class LdapPoolCommonTestMixin(object): # connection pool with self.make_request(): user_ref2 = PROVIDERS.identity_api.authenticate( - user_id=self.user_sna['id'], - password=new_password) + user_id=self.user_sna['id'], password=new_password + ) user_ref.pop('password') self.assertUserDictEqual(user_ref, user_ref2) 
@@ -215,10 +223,12 @@ class LdapPoolCommonTestMixin(object): # is only one connection in pool which get bind again with updated # password..so no old bind is maintained in this case. with self.make_request(): - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - user_id=self.user_sna['id'], - password=old_password) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + user_id=self.user_sna['id'], + password=old_password, + ) @mock.patch.object(fakeldap.FakeLdap, 'search_ext') def test_search_ext_ensure_pool_connection_released(self, mock_search_ext): @@ -231,6 +241,7 @@ class LdapPoolCommonTestMixin(object): test case intentionally throws an exception to ensure everything goes as expected when LDAP connection raises an exception. """ + class CustomDummyException(Exception): pass @@ -257,8 +268,8 @@ class LdapPoolCommonTestMixin(object): 'dc=example,dc=test', 'dummy', 'objectclass=*', - ['mail', 'userPassword'] - ) + ['mail', 'userPassword'], + ), ) # Pooled connection must not be evicted from the pool self.assertEqual(1, len(pool)) @@ -279,6 +290,7 @@ class LdapPoolCommonTestMixin(object): connection for AsynchronousMessage must be released back to the LDAP connection pool. """ + class CustomDummyException(Exception): pass @@ -300,7 +312,7 @@ class LdapPoolCommonTestMixin(object): 'dc=example,dc=test', 'dummy', 'objectclass=*', - ['mail', 'userPassword'] + ['mail', 'userPassword'], ) # Connection is in use, must be already marked active self.assertTrue(msg.connection.active) @@ -310,8 +322,7 @@ class LdapPoolCommonTestMixin(object): # scenario we expect LDAP connection to be made # available back to the pool. 
self.assertRaises( - CustomDummyException, - lambda: handler.result3(msg) + CustomDummyException, lambda: handler.result3(msg) ) # Connection must be set inactive self.assertFalse(msg.connection.active) @@ -320,14 +331,19 @@ class LdapPoolCommonTestMixin(object): self.assertEqual(mock_result3.call_count, i) -class LDAPIdentity(LdapPoolCommonTestMixin, - test_backend_ldap.LDAPIdentity, - unit.TestCase): +class LDAPIdentity( + LdapPoolCommonTestMixin, test_backend_ldap.LDAPIdentity, unit.TestCase +): """Executes tests in existing base class with pooled LDAP handler.""" def setUp(self): - self.useFixture(fixtures.MockPatchObject( - common_ldap.PooledLDAPHandler, 'Connector', fakeldap.FakeLdapPool)) + self.useFixture( + fixtures.MockPatchObject( + common_ldap.PooledLDAPHandler, + 'Connector', + fakeldap.FakeLdapPool, + ) + ) super(LDAPIdentity, self).setUp() self.addCleanup(self.cleanup_pools) diff --git a/keystone/tests/unit/test_backend_rules.py b/keystone/tests/unit/test_backend_rules.py index c32c330719..61083ab0cc 100644 --- a/keystone/tests/unit/test_backend_rules.py +++ b/keystone/tests/unit/test_backend_rules.py @@ -28,36 +28,44 @@ class RulesPolicy(unit.TestCase, policy_tests.PolicyTests): self.config_fixture.config(group='policy', driver='rules') def test_create(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_create) + self.assertRaises( + exception.NotImplemented, super(RulesPolicy, self).test_create + ) def test_get(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_get) + self.assertRaises( + exception.NotImplemented, super(RulesPolicy, self).test_get + ) def test_list(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_list) + self.assertRaises( + exception.NotImplemented, super(RulesPolicy, self).test_list + ) def test_update(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_update) + self.assertRaises( + 
exception.NotImplemented, super(RulesPolicy, self).test_update + ) def test_delete(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_delete) + self.assertRaises( + exception.NotImplemented, super(RulesPolicy, self).test_delete + ) def test_get_policy_returns_not_found(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, - self).test_get_policy_returns_not_found) + self.assertRaises( + exception.NotImplemented, + super(RulesPolicy, self).test_get_policy_returns_not_found, + ) def test_update_policy_returns_not_found(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, - self).test_update_policy_returns_not_found) + self.assertRaises( + exception.NotImplemented, + super(RulesPolicy, self).test_update_policy_returns_not_found, + ) def test_delete_policy_returns_not_found(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, - self).test_delete_policy_returns_not_found) + self.assertRaises( + exception.NotImplemented, + super(RulesPolicy, self).test_delete_policy_returns_not_found, + ) diff --git a/keystone/tests/unit/test_backend_sql.py b/keystone/tests/unit/test_backend_sql.py index 6c8ed5eb35..5d43a13ceb 100644 --- a/keystone/tests/unit/test_backend_sql.py +++ b/keystone/tests/unit/test_backend_sql.py @@ -138,9 +138,7 @@ class DataTypeRoundTrips(SqlTests): datetime_value = datetime.datetime(2019, 5, 15, 10, 17, 55) val = session.scalar( sqlalchemy.select( - sqlalchemy.literal( - datetime_value, type_=core.DateTimeInt - ), + sqlalchemy.literal(datetime_value, type_=core.DateTimeInt), ) ) @@ -156,7 +154,7 @@ class DataTypeRoundTrips(SqlTests): sqlalchemy.literal( datetime_value, type_=core.DateTimeInt ), - sqlalchemy.Integer + sqlalchemy.Integer, ), ) ) @@ -226,135 +224,156 @@ class SqlModels(SqlTests): if column.default: default = column.default.arg actual_schema.append((column.name, type(column.type), default)) - elif (hasattr(column.type, 'length') and - not 
isinstance(column.type, sql.Enum)): + elif hasattr(column.type, 'length') and not isinstance( + column.type, sql.Enum + ): # NOTE(dstanek): Even though sql.Enum columns have a length # set we don't want to catch them here. Maybe in the future # we'll check to see that they contain a list of the correct # possible values. - actual_schema.append((column.name, - type(column.type), - column.type.length)) + actual_schema.append( + (column.name, type(column.type), column.type.length) + ) else: actual_schema.append((column.name, type(column.type), None)) self.assertCountEqual(expected_schema, actual_schema) def test_user_model(self): - cols = (('id', sql.String, 64), - ('domain_id', sql.String, 64), - ('default_project_id', sql.String, 64), - ('enabled', sql.Boolean, None), - ('extra', sql.JsonBlob, None), - ('created_at', sql.DateTime, None), - ('last_active_at', sqlalchemy.Date, None)) + cols = ( + ('id', sql.String, 64), + ('domain_id', sql.String, 64), + ('default_project_id', sql.String, 64), + ('enabled', sql.Boolean, None), + ('extra', sql.JsonBlob, None), + ('created_at', sql.DateTime, None), + ('last_active_at', sqlalchemy.Date, None), + ) self.assertExpectedSchema('user', cols) def test_local_user_model(self): - cols = (('id', sql.Integer, None), - ('user_id', sql.String, 64), - ('name', sql.String, 255), - ('domain_id', sql.String, 64), - ('failed_auth_count', sql.Integer, None), - ('failed_auth_at', sql.DateTime, None)) + cols = ( + ('id', sql.Integer, None), + ('user_id', sql.String, 64), + ('name', sql.String, 255), + ('domain_id', sql.String, 64), + ('failed_auth_count', sql.Integer, None), + ('failed_auth_at', sql.DateTime, None), + ) self.assertExpectedSchema('local_user', cols) def test_password_model(self): - cols = (('id', sql.Integer, None), - ('local_user_id', sql.Integer, None), - ('password_hash', sql.String, 255), - ('created_at', sql.DateTime, None), - ('expires_at', sql.DateTime, None), - ('created_at_int', sql.DateTimeInt, None), - 
('expires_at_int', sql.DateTimeInt, None), - ('self_service', sql.Boolean, False)) + cols = ( + ('id', sql.Integer, None), + ('local_user_id', sql.Integer, None), + ('password_hash', sql.String, 255), + ('created_at', sql.DateTime, None), + ('expires_at', sql.DateTime, None), + ('created_at_int', sql.DateTimeInt, None), + ('expires_at_int', sql.DateTimeInt, None), + ('self_service', sql.Boolean, False), + ) self.assertExpectedSchema('password', cols) def test_federated_user_model(self): - cols = (('id', sql.Integer, None), - ('user_id', sql.String, 64), - ('idp_id', sql.String, 64), - ('protocol_id', sql.String, 64), - ('unique_id', sql.String, 255), - ('display_name', sql.String, 255)) + cols = ( + ('id', sql.Integer, None), + ('user_id', sql.String, 64), + ('idp_id', sql.String, 64), + ('protocol_id', sql.String, 64), + ('unique_id', sql.String, 255), + ('display_name', sql.String, 255), + ) self.assertExpectedSchema('federated_user', cols) def test_nonlocal_user_model(self): - cols = (('domain_id', sql.String, 64), - ('name', sql.String, 255), - ('user_id', sql.String, 64)) + cols = ( + ('domain_id', sql.String, 64), + ('name', sql.String, 255), + ('user_id', sql.String, 64), + ) self.assertExpectedSchema('nonlocal_user', cols) def test_group_model(self): - cols = (('id', sql.String, 64), - ('name', sql.String, 64), - ('description', sql.Text, None), - ('domain_id', sql.String, 64), - ('extra', sql.JsonBlob, None)) + cols = ( + ('id', sql.String, 64), + ('name', sql.String, 64), + ('description', sql.Text, None), + ('domain_id', sql.String, 64), + ('extra', sql.JsonBlob, None), + ) self.assertExpectedSchema('group', cols) def test_project_model(self): - cols = (('id', sql.String, 64), - ('name', sql.String, 64), - ('description', sql.Text, None), - ('domain_id', sql.String, 64), - ('enabled', sql.Boolean, None), - ('extra', sql.JsonBlob, None), - ('parent_id', sql.String, 64), - ('is_domain', sql.Boolean, False)) + cols = ( + ('id', sql.String, 64), + ('name', 
sql.String, 64), + ('description', sql.Text, None), + ('domain_id', sql.String, 64), + ('enabled', sql.Boolean, None), + ('extra', sql.JsonBlob, None), + ('parent_id', sql.String, 64), + ('is_domain', sql.Boolean, False), + ) self.assertExpectedSchema('project', cols) def test_role_assignment_model(self): - cols = (('type', sql.Enum, None), - ('actor_id', sql.String, 64), - ('target_id', sql.String, 64), - ('role_id', sql.String, 64), - ('inherited', sql.Boolean, False)) + cols = ( + ('type', sql.Enum, None), + ('actor_id', sql.String, 64), + ('target_id', sql.String, 64), + ('role_id', sql.String, 64), + ('inherited', sql.Boolean, False), + ) self.assertExpectedSchema('assignment', cols) def test_user_group_membership(self): - cols = (('group_id', sql.String, 64), - ('user_id', sql.String, 64)) + cols = (('group_id', sql.String, 64), ('user_id', sql.String, 64)) self.assertExpectedSchema('user_group_membership', cols) def test_revocation_event_model(self): - cols = (('id', sql.Integer, None), - ('domain_id', sql.String, 64), - ('project_id', sql.String, 64), - ('user_id', sql.String, 64), - ('role_id', sql.String, 64), - ('trust_id', sql.String, 64), - ('consumer_id', sql.String, 64), - ('access_token_id', sql.String, 64), - ('issued_before', sql.DateTime, None), - ('expires_at', sql.DateTime, None), - ('revoked_at', sql.DateTime, None), - ('audit_id', sql.String, 32), - ('audit_chain_id', sql.String, 32)) + cols = ( + ('id', sql.Integer, None), + ('domain_id', sql.String, 64), + ('project_id', sql.String, 64), + ('user_id', sql.String, 64), + ('role_id', sql.String, 64), + ('trust_id', sql.String, 64), + ('consumer_id', sql.String, 64), + ('access_token_id', sql.String, 64), + ('issued_before', sql.DateTime, None), + ('expires_at', sql.DateTime, None), + ('revoked_at', sql.DateTime, None), + ('audit_id', sql.String, 32), + ('audit_chain_id', sql.String, 32), + ) self.assertExpectedSchema('revocation_event', cols) def test_project_tags_model(self): - cols = 
(('project_id', sql.String, 64), - ('name', sql.Unicode, 255)) + cols = (('project_id', sql.String, 64), ('name', sql.Unicode, 255)) self.assertExpectedSchema('project_tag', cols) -class SqlIdentity(SqlTests, - identity_tests.IdentityTests, - assignment_tests.AssignmentTests, - assignment_tests.SystemAssignmentTests, - resource_tests.ResourceTests): +class SqlIdentity( + SqlTests, + identity_tests.IdentityTests, + assignment_tests.AssignmentTests, + assignment_tests.SystemAssignmentTests, + resource_tests.ResourceTests, +): def test_password_hashed(self): with sql.session_for_read() as session: user_ref = PROVIDERS.identity_api._get_user( session, self.user_foo['id'] ) - self.assertNotEqual(self.user_foo['password'], - user_ref['password']) + self.assertNotEqual( + self.user_foo['password'], user_ref['password'] + ) def test_create_user_with_null_password(self): user_dict = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) user_dict["password"] = None new_user_dict = PROVIDERS.identity_api.create_user(user_dict) with sql.session_for_read() as session: @@ -365,7 +384,8 @@ class SqlIdentity(SqlTests, def test_update_user_with_null_password(self): user_dict = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) self.assertTrue(user_dict['password']) new_user_dict = PROVIDERS.identity_api.create_user(user_dict) new_user_dict["password"] = None @@ -387,9 +407,11 @@ class SqlIdentity(SqlTests, user['id'], self.project_bar['id'], role_member['id'] ) PROVIDERS.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - PROVIDERS.assignment_api.list_projects_for_user, - user['id']) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.assignment_api.list_projects_for_user, + user['id'], + ) def test_create_user_case_sensitivity(self): # user name case sensitivity is down to the fact that it is marked as @@ -397,8 +419,10 @@ 
class SqlIdentity(SqlTests, # LDAP. # create a ref with a lowercase name - ref = unit.new_user_ref(name=uuid.uuid4().hex.lower(), - domain_id=CONF.identity.default_domain_id) + ref = unit.new_user_ref( + name=uuid.uuid4().hex.lower(), + domain_id=CONF.identity.default_domain_id, + ) ref = PROVIDERS.identity_api.create_user(ref) # assign a new ID with the same name, but this time in uppercase @@ -444,7 +468,8 @@ class SqlIdentity(SqlTests, arbitrary_key = uuid.uuid4().hex arbitrary_value = uuid.uuid4().hex project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) project[arbitrary_key] = arbitrary_value ref = PROVIDERS.resource_api.create_project(project['id'], project) self.assertEqual(arbitrary_value, ref[arbitrary_key]) @@ -511,12 +536,14 @@ class SqlIdentity(SqlTests, ) self.assertEqual(0, len(user_domains)) PROVIDERS.assignment_api.create_grant( - user_id=user['id'], domain_id=test_domain1['id'], - role_id=self.role_member['id'] + user_id=user['id'], + domain_id=test_domain1['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - user_id=user['id'], domain_id=test_domain2['id'], - role_id=self.role_member['id'] + user_id=user['id'], + domain_id=test_domain2['id'], + role_id=self.role_member['id'], ) user_domains = PROVIDERS.assignment_api.list_domains_for_user( user['id'] @@ -548,16 +575,19 @@ class SqlIdentity(SqlTests, # Create 3 grants, one user grant, the other two as group grants PROVIDERS.assignment_api.create_grant( - user_id=user['id'], domain_id=test_domain1['id'], - role_id=self.role_member['id'] + user_id=user['id'], + domain_id=test_domain1['id'], + role_id=self.role_member['id'], ) PROVIDERS.assignment_api.create_grant( - group_id=group1['id'], domain_id=test_domain2['id'], - role_id=self.role_admin['id'] + group_id=group1['id'], + domain_id=test_domain2['id'], + role_id=self.role_admin['id'], ) PROVIDERS.assignment_api.create_grant( - 
group_id=group2['id'], domain_id=test_domain3['id'], - role_id=self.role_admin['id'] + group_id=group2['id'], + domain_id=test_domain3['id'], + role_id=self.role_admin['id'], ) user_domains = PROVIDERS.assignment_api.list_domains_for_user( user['id'] @@ -590,12 +620,16 @@ class SqlIdentity(SqlTests, # Create a grant on each domain, one user grant, one group grant, # both inherited. PROVIDERS.assignment_api.create_grant( - user_id=user['id'], domain_id=domain1['id'], role_id=role['id'], - inherited_to_projects=True + user_id=user['id'], + domain_id=domain1['id'], + role_id=role['id'], + inherited_to_projects=True, ) PROVIDERS.assignment_api.create_grant( - group_id=group['id'], domain_id=domain2['id'], role_id=role['id'], - inherited_to_projects=True + group_id=group['id'], + domain_id=domain2['id'], + role_id=role['id'], + inherited_to_projects=True, ) user_domains = PROVIDERS.assignment_api.list_domains_for_user( @@ -621,7 +655,8 @@ class SqlIdentity(SqlTests, for x in range(0, USER_COUNT): group_refs = PROVIDERS.identity_api.list_groups_for_user( - test_users[x]['id']) + test_users[x]['id'] + ) self.assertEqual(0, len(group_refs)) for x in range(0, GROUP_COUNT): @@ -634,18 +669,21 @@ class SqlIdentity(SqlTests, # add the user to the group and ensure that the # group count increases by one for each group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(before_count, len(group_refs)) PROVIDERS.identity_api.add_user_to_group( - positive_user['id'], - new_group['id']) + positive_user['id'], new_group['id'] + ) group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(after_count, len(group_refs)) # Make sure the group count for the unrelated user did not change group_refs = PROVIDERS.identity_api.list_groups_for_user( - negative_user['id']) + negative_user['id'] + ) self.assertEqual(0, len(group_refs)) # remove the user from each 
group and ensure that @@ -654,25 +692,29 @@ class SqlIdentity(SqlTests, before_count = GROUP_COUNT - x after_count = GROUP_COUNT - x - 1 group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(before_count, len(group_refs)) PROVIDERS.identity_api.remove_user_from_group( - positive_user['id'], - test_groups[x]['id']) + positive_user['id'], test_groups[x]['id'] + ) group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(after_count, len(group_refs)) # Make sure the group count for the unrelated user # did not change group_refs = PROVIDERS.identity_api.list_groups_for_user( - negative_user['id']) + negative_user['id'] + ) self.assertEqual(0, len(group_refs)) def test_add_user_to_group_expiring_mapped(self): self._build_fed_resource() domain = self._get_domain_fixture() - self.config_fixture.config(group='federation', - default_authorization_ttl=5) + self.config_fixture.config( + group='federation', default_authorization_ttl=5 + ) time = datetime.datetime.utcnow() tick = datetime.timedelta(minutes=5) @@ -689,25 +731,35 @@ class SqlIdentity(SqlTests, with freezegun.freeze_time(time - tick) as frozen_time: user = PROVIDERS.identity_api.shadow_federated_user( - fed_dict['idp_id'], fed_dict['protocol_id'], - fed_dict, group_ids=[new_group['id']]) + fed_dict['idp_id'], + fed_dict['protocol_id'], + fed_dict, + group_ids=[new_group['id']], + ) PROVIDERS.identity_api.check_user_in_group( - user['id'], new_group['id']) + user['id'], new_group['id'] + ) # Expiration frozen_time.tick(tick) - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.check_user_in_group, - user['id'], - new_group['id']) + self.assertRaises( + exception.NotFound, + PROVIDERS.identity_api.check_user_in_group, + user['id'], + new_group['id'], + ) # Renewal PROVIDERS.identity_api.shadow_federated_user( - fed_dict['idp_id'], fed_dict['protocol_id'], fed_dict, - 
group_ids=[new_group['id']]) - PROVIDERS.identity_api.check_user_in_group(user['id'], - new_group['id']) + fed_dict['idp_id'], + fed_dict['protocol_id'], + fed_dict, + group_ids=[new_group['id']], + ) + PROVIDERS.identity_api.check_user_in_group( + user['id'], new_group['id'] + ) def test_add_user_to_group_expiring(self): self._build_fed_resource() @@ -727,38 +779,49 @@ class SqlIdentity(SqlTests, with freezegun.freeze_time(time - tick) as frozen_time: PROVIDERS.shadow_users_api.add_user_to_group_expires( - new_user['id'], new_group['id']) + new_user['id'], new_group['id'] + ) - self.config_fixture.config(group='federation', - default_authorization_ttl=0) - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.check_user_in_group, - new_user['id'], - new_group['id']) + self.config_fixture.config( + group='federation', default_authorization_ttl=0 + ) + self.assertRaises( + exception.NotFound, + PROVIDERS.identity_api.check_user_in_group, + new_user['id'], + new_group['id'], + ) - self.config_fixture.config(group='federation', - default_authorization_ttl=5) - PROVIDERS.identity_api.check_user_in_group(new_user['id'], - new_group['id']) + self.config_fixture.config( + group='federation', default_authorization_ttl=5 + ) + PROVIDERS.identity_api.check_user_in_group( + new_user['id'], new_group['id'] + ) # Expiration frozen_time.tick(tick) - self.assertRaises(exception.NotFound, - PROVIDERS.identity_api.check_user_in_group, - new_user['id'], - new_group['id']) + self.assertRaises( + exception.NotFound, + PROVIDERS.identity_api.check_user_in_group, + new_user['id'], + new_group['id'], + ) # Renewal PROVIDERS.shadow_users_api.add_user_to_group_expires( - new_user['id'], new_group['id']) - PROVIDERS.identity_api.check_user_in_group(new_user['id'], - new_group['id']) + new_user['id'], new_group['id'] + ) + PROVIDERS.identity_api.check_user_in_group( + new_user['id'], new_group['id'] + ) def test_add_user_to_group_expiring_list(self): self._build_fed_resource() 
domain = self._get_domain_fixture() - self.config_fixture.config(group='federation', - default_authorization_ttl=5) + self.config_fixture.config( + group='federation', default_authorization_ttl=5 + ) time = datetime.datetime.utcnow() tick = datetime.timedelta(minutes=5) @@ -774,19 +837,24 @@ class SqlIdentity(SqlTests, domain['id'], fed_dict ) - PROVIDERS.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - PROVIDERS.identity_api.check_user_in_group(new_user['id'], - new_group['id']) + PROVIDERS.identity_api.add_user_to_group( + new_user['id'], new_group['id'] + ) + PROVIDERS.identity_api.check_user_in_group( + new_user['id'], new_group['id'] + ) with freezegun.freeze_time(time - tick) as frozen_time: PROVIDERS.shadow_users_api.add_user_to_group_expires( - new_user['id'], exp_new_group['id']) - PROVIDERS.identity_api.check_user_in_group(new_user['id'], - new_group['id']) + new_user['id'], exp_new_group['id'] + ) + PROVIDERS.identity_api.check_user_in_group( + new_user['id'], new_group['id'] + ) groups = PROVIDERS.identity_api.list_groups_for_user( - new_user['id']) + new_user['id'] + ) self.assertEqual(len(groups), 2) for group in groups: if group.get('membership_expires_at'): @@ -794,7 +862,8 @@ class SqlIdentity(SqlTests, frozen_time.tick(tick) groups = PROVIDERS.identity_api.list_groups_for_user( - new_user['id']) + new_user['id'] + ) self.assertEqual(len(groups), 1) def test_storing_null_domain_id_in_project_ref(self): @@ -808,7 +877,8 @@ class SqlIdentity(SqlTests, """ spoiler_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project( spoiler_project['id'], spoiler_project ) @@ -847,9 +917,11 @@ class SqlIdentity(SqlTests, project['enabled'] = False PROVIDERS.resource_api.update_project(project['id'], project) PROVIDERS.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - 
PROVIDERS.resource_api.get_project, - project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project['id'], + ) def test_hidden_project_domain_root_is_really_hidden(self): """Ensure we cannot access the hidden root of all project domains. @@ -860,55 +932,64 @@ class SqlIdentity(SqlTests, specify their own ID for a new entity. """ + def _exercise_project_api(ref_id): driver = PROVIDERS.resource_api.driver - self.assertRaises(exception.ProjectNotFound, - driver.get_project, - ref_id) + self.assertRaises( + exception.ProjectNotFound, driver.get_project, ref_id + ) - self.assertRaises(exception.ProjectNotFound, - driver.get_project_by_name, - resource.NULL_DOMAIN_ID, - ref_id) + self.assertRaises( + exception.ProjectNotFound, + driver.get_project_by_name, + resource.NULL_DOMAIN_ID, + ref_id, + ) - project_ids = [x['id'] for x in - driver.list_projects(driver_hints.Hints())] + project_ids = [ + x['id'] for x in driver.list_projects(driver_hints.Hints()) + ] self.assertNotIn(ref_id, project_ids) projects = driver.list_projects_from_ids([ref_id]) self.assertThat(projects, matchers.HasLength(0)) - project_ids = [x for x in - driver.list_project_ids_from_domain_ids([ref_id])] + project_ids = [ + x for x in driver.list_project_ids_from_domain_ids([ref_id]) + ] self.assertNotIn(ref_id, project_ids) - self.assertRaises(exception.DomainNotFound, - driver.list_projects_in_domain, - ref_id) + self.assertRaises( + exception.DomainNotFound, + driver.list_projects_in_domain, + ref_id, + ) project_ids = [ - x['id'] for x in - driver.list_projects_acting_as_domain(driver_hints.Hints())] + x['id'] + for x in driver.list_projects_acting_as_domain( + driver_hints.Hints() + ) + ] self.assertNotIn(ref_id, project_ids) projects = driver.list_projects_in_subtree(ref_id) self.assertThat(projects, matchers.HasLength(0)) - self.assertRaises(exception.ProjectNotFound, - driver.list_project_parents, - ref_id) + self.assertRaises( + 
exception.ProjectNotFound, driver.list_project_parents, ref_id + ) # A non-existing project just returns True from the driver self.assertTrue(driver.is_leaf_project(ref_id)) - self.assertRaises(exception.ProjectNotFound, - driver.update_project, - ref_id, - {}) + self.assertRaises( + exception.ProjectNotFound, driver.update_project, ref_id, {} + ) - self.assertRaises(exception.ProjectNotFound, - driver.delete_project, - ref_id) + self.assertRaises( + exception.ProjectNotFound, driver.delete_project, ref_id + ) # Deleting list of projects that includes a non-existing project # should be silent. The root domain <> can't @@ -940,8 +1021,9 @@ class SqlIdentity(SqlTests, self.calls += 1 counter = CallCounter() - sqlalchemy.event.listen(sqlalchemy.orm.query.Query, 'before_compile', - counter.query_counter) + sqlalchemy.event.listen( + sqlalchemy.orm.query.Query, 'before_compile', counter.query_counter + ) first_call_users = PROVIDERS.identity_api.list_users() first_call_counter = counter.calls @@ -966,11 +1048,13 @@ class SqlIdentity(SqlTests, # | # project_2 project_1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project_1['id'], project_1) project_2 = unit.new_project_ref( domain_id=CONF.identity.default_domain_id, - parent_id=project_1['id']) + parent_id=project_1['id'], + ) PROVIDERS.resource_api.create_project(project_2['id'], project_2) # if max_depth is None or >= current project depth, return nothing. 
@@ -981,9 +1065,11 @@ class SqlIdentity(SqlTests, resp = PROVIDERS.resource_api.check_project_depth(max_depth=4) self.assertIsNone(resp) # if max_depth < current project depth, raise LimitTreeExceedError - self.assertRaises(exception.LimitTreeExceedError, - PROVIDERS.resource_api.check_project_depth, - 2) + self.assertRaises( + exception.LimitTreeExceedError, + PROVIDERS.resource_api.check_project_depth, + 2, + ) def test_update_user_with_stale_data_forces_retry(self): # Capture log output so we know oslo.db attempted a retry @@ -991,7 +1077,8 @@ class SqlIdentity(SqlTests, # Create a new user user_dict = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) new_user_dict = PROVIDERS.identity_api.create_user(user_dict) side_effects = [ @@ -1002,7 +1089,7 @@ class SqlIdentity(SqlTests, # The oslo.db library will retry the request, so the second # time this method is called let's return a valid session # object - sql.session_for_write() + sql.session_for_write(), ] with mock.patch('keystone.common.sql.session_for_write') as m: m.side_effect = side_effects @@ -1012,7 +1099,8 @@ class SqlIdentity(SqlTests, # will succeed new_user_dict['email'] = uuid.uuid4().hex PROVIDERS.identity_api.update_user( - new_user_dict['id'], new_user_dict) + new_user_dict['id'], new_user_dict + ) # Make sure oslo.db retried the update by checking the log output expected_log_message = ( @@ -1044,25 +1132,29 @@ class SqlCatalog(SqlTests, catalog_tests.CatalogTests): PROVIDERS.catalog_api.create_service(service['id'], service) malformed_url = "http://192.168.1.104:8774/v2/$(project)s" - endpoint = unit.new_endpoint_ref(service_id=service['id'], - url=malformed_url, - region_id=None) + endpoint = unit.new_endpoint_ref( + service_id=service['id'], url=malformed_url, region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint.copy()) - self.assertRaises(exception.ProjectNotFound, - 
PROVIDERS.catalog_api.get_v3_catalog, - 'fake-user', - 'fake-project') + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.catalog_api.get_v3_catalog, + 'fake-user', + 'fake-project', + ) def test_get_v3_catalog_with_empty_public_url(self): service = unit.new_service_ref() PROVIDERS.catalog_api.create_service(service['id'], service) - endpoint = unit.new_endpoint_ref(url='', service_id=service['id'], - region_id=None) + endpoint = unit.new_endpoint_ref( + url='', service_id=service['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint.copy()) - catalog = PROVIDERS.catalog_api.get_v3_catalog(self.user_foo['id'], - self.project_bar['id']) + catalog = PROVIDERS.catalog_api.get_v3_catalog( + self.user_foo['id'], self.project_bar['id'] + ) catalog_endpoint = catalog[0] self.assertEqual(service['name'], catalog_endpoint['name']) self.assertEqual(service['id'], catalog_endpoint['id']) @@ -1072,27 +1164,34 @@ class SqlCatalog(SqlTests, catalog_tests.CatalogTests): service = unit.new_service_ref() PROVIDERS.catalog_api.create_service(service['id'], service) - endpoint = unit.new_endpoint_ref(region_id=uuid.uuid4().hex, - service_id=service['id']) + endpoint = unit.new_endpoint_ref( + region_id=uuid.uuid4().hex, service_id=service['id'] + ) - self.assertRaises(exception.ValidationError, - PROVIDERS.catalog_api.create_endpoint, - endpoint['id'], - endpoint.copy()) + self.assertRaises( + exception.ValidationError, + PROVIDERS.catalog_api.create_endpoint, + endpoint['id'], + endpoint.copy(), + ) def test_create_region_invalid_id(self): region = unit.new_region_ref(id='0' * 256) - self.assertRaises(exception.StringLengthExceeded, - PROVIDERS.catalog_api.create_region, - region) + self.assertRaises( + exception.StringLengthExceeded, + PROVIDERS.catalog_api.create_region, + region, + ) def test_create_region_invalid_parent_id(self): region = unit.new_region_ref(parent_region_id='0' * 256) - self.assertRaises(exception.RegionNotFound, - 
PROVIDERS.catalog_api.create_region, - region) + self.assertRaises( + exception.RegionNotFound, + PROVIDERS.catalog_api.create_region, + region, + ) def test_delete_region_with_endpoint(self): # create a region @@ -1107,47 +1206,57 @@ class SqlCatalog(SqlTests, catalog_tests.CatalogTests): PROVIDERS.catalog_api.create_service(service['id'], service) # create an endpoint attached to the service and child region - child_endpoint = unit.new_endpoint_ref(region_id=child_region['id'], - service_id=service['id']) + child_endpoint = unit.new_endpoint_ref( + region_id=child_region['id'], service_id=service['id'] + ) PROVIDERS.catalog_api.create_endpoint( child_endpoint['id'], child_endpoint ) - self.assertRaises(exception.RegionDeletionError, - PROVIDERS.catalog_api.delete_region, - child_region['id']) + self.assertRaises( + exception.RegionDeletionError, + PROVIDERS.catalog_api.delete_region, + child_region['id'], + ) # create an endpoint attached to the service and parent region - endpoint = unit.new_endpoint_ref(region_id=region['id'], - service_id=service['id']) + endpoint = unit.new_endpoint_ref( + region_id=region['id'], service_id=service['id'] + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) - self.assertRaises(exception.RegionDeletionError, - PROVIDERS.catalog_api.delete_region, - region['id']) + self.assertRaises( + exception.RegionDeletionError, + PROVIDERS.catalog_api.delete_region, + region['id'], + ) def test_v3_catalog_domain_scoped_token(self): # test the case that project_id is None. 
srv_1 = unit.new_service_ref() PROVIDERS.catalog_api.create_service(srv_1['id'], srv_1) - endpoint_1 = unit.new_endpoint_ref(service_id=srv_1['id'], - region_id=None) + endpoint_1 = unit.new_endpoint_ref( + service_id=srv_1['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint_1['id'], endpoint_1) srv_2 = unit.new_service_ref() PROVIDERS.catalog_api.create_service(srv_2['id'], srv_2) - endpoint_2 = unit.new_endpoint_ref(service_id=srv_2['id'], - region_id=None) + endpoint_2 = unit.new_endpoint_ref( + service_id=srv_2['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint_2['id'], endpoint_2) - self.config_fixture.config(group='endpoint_filter', - return_all_endpoints_if_no_filter=True) + self.config_fixture.config( + group='endpoint_filter', return_all_endpoints_if_no_filter=True + ) catalog_ref = PROVIDERS.catalog_api.get_v3_catalog( uuid.uuid4().hex, None ) self.assertThat(catalog_ref, matchers.HasLength(2)) - self.config_fixture.config(group='endpoint_filter', - return_all_endpoints_if_no_filter=False) + self.config_fixture.config( + group='endpoint_filter', return_all_endpoints_if_no_filter=False + ) catalog_ref = PROVIDERS.catalog_api.get_v3_catalog( uuid.uuid4().hex, None ) @@ -1156,16 +1265,18 @@ class SqlCatalog(SqlTests, catalog_tests.CatalogTests): def test_v3_catalog_endpoint_filter_enabled(self): srv_1 = unit.new_service_ref() PROVIDERS.catalog_api.create_service(srv_1['id'], srv_1) - endpoint_1 = unit.new_endpoint_ref(service_id=srv_1['id'], - region_id=None) + endpoint_1 = unit.new_endpoint_ref( + service_id=srv_1['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint_1['id'], endpoint_1) - endpoint_2 = unit.new_endpoint_ref(service_id=srv_1['id'], - region_id=None) + endpoint_2 = unit.new_endpoint_ref( + service_id=srv_1['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint_2['id'], endpoint_2) # create endpoint-project association. 
PROVIDERS.catalog_api.add_endpoint_to_project( - endpoint_1['id'], - self.project_bar['id']) + endpoint_1['id'], self.project_bar['id'] + ) catalog_ref = PROVIDERS.catalog_api.get_v3_catalog( uuid.uuid4().hex, self.project_bar['id'] @@ -1173,17 +1284,20 @@ class SqlCatalog(SqlTests, catalog_tests.CatalogTests): self.assertThat(catalog_ref, matchers.HasLength(1)) self.assertThat(catalog_ref[0]['endpoints'], matchers.HasLength(1)) # the endpoint is that defined in the endpoint-project association. - self.assertEqual(endpoint_1['id'], - catalog_ref[0]['endpoints'][0]['id']) + self.assertEqual( + endpoint_1['id'], catalog_ref[0]['endpoints'][0]['id'] + ) def test_v3_catalog_endpoint_filter_disabled(self): # there is no endpoint-project association defined. - self.config_fixture.config(group='endpoint_filter', - return_all_endpoints_if_no_filter=True) + self.config_fixture.config( + group='endpoint_filter', return_all_endpoints_if_no_filter=True + ) srv_1 = unit.new_service_ref() PROVIDERS.catalog_api.create_service(srv_1['id'], srv_1) - endpoint_1 = unit.new_endpoint_ref(service_id=srv_1['id'], - region_id=None) + endpoint_1 = unit.new_endpoint_ref( + service_id=srv_1['id'], region_id=None + ) PROVIDERS.catalog_api.create_endpoint(endpoint_1['id'], endpoint_1) srv_2 = unit.new_service_ref() @@ -1239,7 +1353,8 @@ class SqlFilterTests(SqlTests, identity_tests.FilterTests): DOMAIN1_ENTITIES = 3 self.entity_list[entity] = self._create_test_data(entity, 2) self.domain1_entity_list[entity] = self._create_test_data( - entity, DOMAIN1_ENTITIES, self.domain1['id']) + entity, DOMAIN1_ENTITIES, self.domain1['id'] + ) # Should get back the DOMAIN1_ENTITIES in domain1 hints = driver_hints.Hints() @@ -1298,7 +1413,8 @@ class FakeTable(sql.ModelBase): @sql.handle_conflicts('keystone') def update(self): raise db_exception.DBError( - inner_exception=exc.IntegrityError('a', 'a', 'a')) + inner_exception=exc.IntegrityError('a', 'a', 'a') + ) @sql.handle_conflicts('keystone') def 
lookup(self): @@ -1308,8 +1424,9 @@ class FakeTable(sql.ModelBase): class SqlDecorators(unit.TestCase): def test_initialization_fail(self): - self.assertRaises(exception.StringLengthExceeded, - FakeTable, col='a' * 64) + self.assertRaises( + exception.StringLengthExceeded, FakeTable, col='a' * 64 + ) def test_initialization(self): tt = FakeTable(col='a') @@ -1329,23 +1446,25 @@ class SqlModuleInitialization(unit.TestCase): @mock.patch.object(options, 'set_defaults') def test_initialize_module(self, set_defaults, CONF): sql.initialize() - set_defaults.assert_called_with(CONF, - connection='sqlite:///keystone.db') + set_defaults.assert_called_with( + CONF, connection='sqlite:///keystone.db' + ) class SqlCredential(SqlTests): def _create_credential_with_user_id(self, user_id=uuid.uuid4().hex): - credential = unit.new_credential_ref(user_id=user_id, - extra=uuid.uuid4().hex, - type=uuid.uuid4().hex) + credential = unit.new_credential_ref( + user_id=user_id, extra=uuid.uuid4().hex, type=uuid.uuid4().hex + ) PROVIDERS.credential_api.create_credential( credential['id'], credential ) return credential - def _validateCredentialList(self, retrieved_credentials, - expected_credentials): + def _validateCredentialList( + self, retrieved_credentials, expected_credentials + ): self.assertEqual(len(expected_credentials), len(retrieved_credentials)) retrived_ids = [c['id'] for c in retrieved_credentials] for cred in expected_credentials: @@ -1358,14 +1477,13 @@ class SqlCredential(SqlTests): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_provider.MAX_ACTIVE_KEYS + credential_provider.MAX_ACTIVE_KEYS, ) ) self.credentials = [] for _ in range(3): - self.credentials.append( - self._create_credential_with_user_id()) + self.credentials.append(self._create_credential_with_user_id()) self.user_credentials = [] for _ in range(3): cred = self._create_credential_with_user_id(self.user_foo['id']) @@ -1383,21 +1501,22 @@ class SqlCredential(SqlTests): def 
test_list_credentials_for_user(self): credentials = PROVIDERS.credential_api.list_credentials_for_user( - self.user_foo['id']) + self.user_foo['id'] + ) self._validateCredentialList(credentials, self.user_credentials) def test_list_credentials_for_user_and_type(self): cred = self.user_credentials[0] credentials = PROVIDERS.credential_api.list_credentials_for_user( - self.user_foo['id'], type=cred['type']) + self.user_foo['id'], type=cred['type'] + ) self._validateCredentialList(credentials, [cred]) def test_create_credential_is_encrypted_when_stored(self): credential = unit.new_credential_ref(user_id=uuid.uuid4().hex) credential_id = credential['id'] returned_credential = PROVIDERS.credential_api.create_credential( - credential_id, - credential + credential_id, credential ) # Make sure the `blob` is *not* encrypted when returned from the @@ -1411,8 +1530,7 @@ class SqlCredential(SqlTests): # Pull the credential directly from the backend, the `blob` should be # encrypted. self.assertNotEqual( - credential_from_backend['encrypted_blob'], - credential['blob'] + credential_from_backend['encrypted_blob'], credential['blob'] ) def test_list_credentials_is_decrypted(self): @@ -1420,8 +1538,7 @@ class SqlCredential(SqlTests): credential_id = credential['id'] created_credential = PROVIDERS.credential_api.create_credential( - credential_id, - credential + credential_id, credential ) # Pull the credential directly from the backend, the `blob` should be @@ -1430,8 +1547,7 @@ class SqlCredential(SqlTests): PROVIDERS.credential_api.driver.get_credential(credential_id) ) self.assertNotEqual( - credential_from_backend['encrypted_blob'], - credential['blob'] + credential_from_backend['encrypted_blob'], credential['blob'] ) # Make sure the `blob` values listed from the API are not encrypted. 
@@ -1481,14 +1597,24 @@ class SqlLimit(SqlTests, limit_tests.LimitTests): registered_limit_1 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_one['id'], - resource_name='volume', default_limit=10, id=uuid.uuid4().hex) + resource_name='volume', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_2 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='snapshot', default_limit=10, id=uuid.uuid4().hex) + resource_name='snapshot', + default_limit=10, + id=uuid.uuid4().hex, + ) registered_limit_3 = unit.new_registered_limit_ref( service_id=self.service_one['id'], region_id=self.region_two['id'], - resource_name='backup', default_limit=10, id=uuid.uuid4().hex) + resource_name='backup', + default_limit=10, + id=uuid.uuid4().hex, + ) PROVIDERS.unified_limit_api.create_registered_limits( - [registered_limit_1, registered_limit_2, registered_limit_3]) + [registered_limit_1, registered_limit_2, registered_limit_3] + ) diff --git a/keystone/tests/unit/test_backend_templated.py b/keystone/tests/unit/test_backend_templated.py index 3fb69d14e6..6153a6c18c 100644 --- a/keystone/tests/unit/test_backend_templated.py +++ b/keystone/tests/unit/test_backend_templated.py @@ -24,8 +24,9 @@ from keystone.tests.unit.ksfixtures import database PROVIDERS = provider_api.ProviderAPIs -BROKEN_WRITE_FUNCTIONALITY_MSG = ("Templated backend doesn't correctly " - "implement write operations") +BROKEN_WRITE_FUNCTIONALITY_MSG = ( + "Templated backend doesn't correctly " "implement write operations" +) class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): @@ -37,15 +38,15 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): 'publicURL': 'http://localhost:8774/v1.1/bar', 'internalURL': 'http://localhost:8774/v1.1/bar', 'name': "'Compute Service'", - 'id': '2' + 'id': '2', }, 'identity': { 'adminURL': 'http://localhost:35357/v3', 'publicURL': 
'http://localhost:5000/v3', 'internalURL': 'http://localhost:35357/v3', 'name': "'Identity Service'", - 'id': '1' - } + 'id': '1', + }, } } @@ -60,7 +61,8 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): self.config_fixture.config( group='catalog', driver='templated', - template_file=unit.dirs.tests('default_catalog.templates')) + template_file=unit.dirs.tests('default_catalog.templates'), + ) def test_get_catalog(self): catalog_ref = PROVIDERS.catalog_api.get_catalog('foo', 'bar') @@ -83,13 +85,16 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): def test_get_v3_catalog_endpoint_disabled(self): self.skip_test_overrides( - "Templated backend doesn't have disabled endpoints") + "Templated backend doesn't have disabled endpoints" + ) def assert_catalogs_equal(self, expected, observed): def sort_key(d): return d['id'] - for e, o in zip(sorted(expected, key=sort_key), - sorted(observed, key=sort_key)): + + for e, o in zip( + sorted(expected, key=sort_key), sorted(observed, key=sort_key) + ): expected_endpoints = e.pop('endpoints') observed_endpoints = o.pop('endpoints') self.assertDictEqual(e, o) @@ -100,32 +105,51 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): project_id = uuid.uuid4().hex catalog_ref = PROVIDERS.catalog_api.get_v3_catalog(user_id, project_id) exp_catalog = [ - {'endpoints': [ - {'interface': 'admin', - 'region': 'RegionOne', - 'url': 'http://localhost:8774/v1.1/%s' % project_id}, - {'interface': 'public', - 'region': 'RegionOne', - 'url': 'http://localhost:8774/v1.1/%s' % project_id}, - {'interface': 'internal', - 'region': 'RegionOne', - 'url': 'http://localhost:8774/v1.1/%s' % project_id}], - 'type': 'compute', - 'name': "'Compute Service'", - 'id': '2'}, - {'endpoints': [ - {'interface': 'admin', - 'region': 'RegionOne', - 'url': 'http://localhost:35357/v3'}, - {'interface': 'public', - 'region': 'RegionOne', - 'url': 'http://localhost:5000/v3'}, - {'interface': 
'internal', - 'region': 'RegionOne', - 'url': 'http://localhost:35357/v3'}], - 'type': 'identity', - 'name': "'Identity Service'", - 'id': '1'}] + { + 'endpoints': [ + { + 'interface': 'admin', + 'region': 'RegionOne', + 'url': 'http://localhost:8774/v1.1/%s' % project_id, + }, + { + 'interface': 'public', + 'region': 'RegionOne', + 'url': 'http://localhost:8774/v1.1/%s' % project_id, + }, + { + 'interface': 'internal', + 'region': 'RegionOne', + 'url': 'http://localhost:8774/v1.1/%s' % project_id, + }, + ], + 'type': 'compute', + 'name': "'Compute Service'", + 'id': '2', + }, + { + 'endpoints': [ + { + 'interface': 'admin', + 'region': 'RegionOne', + 'url': 'http://localhost:35357/v3', + }, + { + 'interface': 'public', + 'region': 'RegionOne', + 'url': 'http://localhost:5000/v3', + }, + { + 'interface': 'internal', + 'region': 'RegionOne', + 'url': 'http://localhost:35357/v3', + }, + ], + 'type': 'identity', + 'name': "'Identity Service'", + 'id': '1', + }, + ] self.assert_catalogs_equal(exp_catalog, catalog_ref) def test_get_multi_region_v3_catalog(self): @@ -136,54 +160,86 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): # Load the multi-region catalog. 
catalog_api._load_templates( - unit.dirs.tests('default_catalog_multi_region.templates')) + unit.dirs.tests('default_catalog_multi_region.templates') + ) catalog_ref = catalog_api.get_v3_catalog(user_id, project_id) exp_catalog = [ - {'endpoints': [ - {'interface': 'admin', - 'region': 'RegionOne', - 'url': 'http://region-one:8774/v1.1/%s' % project_id}, - {'interface': 'public', - 'region': 'RegionOne', - 'url': 'http://region-one:8774/v1.1/%s' % project_id}, - {'interface': 'internal', - 'region': 'RegionOne', - 'url': 'http://region-one:8774/v1.1/%s' % project_id}, - {'interface': 'admin', - 'region': 'RegionTwo', - 'url': 'http://region-two:8774/v1.1/%s' % project_id}, - {'interface': 'public', - 'region': 'RegionTwo', - 'url': 'http://region-two:8774/v1.1/%s' % project_id}, - {'interface': 'internal', - 'region': 'RegionTwo', - 'url': 'http://region-two:8774/v1.1/%s' % project_id}], + { + 'endpoints': [ + { + 'interface': 'admin', + 'region': 'RegionOne', + 'url': 'http://region-one:8774/v1.1/%s' % project_id, + }, + { + 'interface': 'public', + 'region': 'RegionOne', + 'url': 'http://region-one:8774/v1.1/%s' % project_id, + }, + { + 'interface': 'internal', + 'region': 'RegionOne', + 'url': 'http://region-one:8774/v1.1/%s' % project_id, + }, + { + 'interface': 'admin', + 'region': 'RegionTwo', + 'url': 'http://region-two:8774/v1.1/%s' % project_id, + }, + { + 'interface': 'public', + 'region': 'RegionTwo', + 'url': 'http://region-two:8774/v1.1/%s' % project_id, + }, + { + 'interface': 'internal', + 'region': 'RegionTwo', + 'url': 'http://region-two:8774/v1.1/%s' % project_id, + }, + ], 'type': 'compute', 'name': "'Compute Service'", - 'id': '2'}, - {'endpoints': [ - {'interface': 'admin', - 'region': 'RegionOne', - 'url': 'http://region-one:35357/v3'}, - {'interface': 'public', - 'region': 'RegionOne', - 'url': 'http://region-one:5000/v3'}, - {'interface': 'internal', - 'region': 'RegionOne', - 'url': 'http://region-one:35357/v3'}, - {'interface': 'admin', - 
'region': 'RegionTwo', - 'url': 'http://region-two:35357/v3'}, - {'interface': 'public', - 'region': 'RegionTwo', - 'url': 'http://region-two:5000/v3'}, - {'interface': 'internal', - 'region': 'RegionTwo', - 'url': 'http://region-two:35357/v3'}], + 'id': '2', + }, + { + 'endpoints': [ + { + 'interface': 'admin', + 'region': 'RegionOne', + 'url': 'http://region-one:35357/v3', + }, + { + 'interface': 'public', + 'region': 'RegionOne', + 'url': 'http://region-one:5000/v3', + }, + { + 'interface': 'internal', + 'region': 'RegionOne', + 'url': 'http://region-one:35357/v3', + }, + { + 'interface': 'admin', + 'region': 'RegionTwo', + 'url': 'http://region-two:35357/v3', + }, + { + 'interface': 'public', + 'region': 'RegionTwo', + 'url': 'http://region-two:5000/v3', + }, + { + 'interface': 'internal', + 'region': 'RegionTwo', + 'url': 'http://region-two:35357/v3', + }, + ], 'type': 'identity', 'name': "'Identity Service'", - 'id': '1'}] + 'id': '1', + }, + ] self.assert_catalogs_equal(exp_catalog, catalog_ref) def test_get_catalog_ignores_endpoints_with_invalid_urls(self): @@ -193,23 +249,35 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): # endpoint which contains this kind of URL. 
catalog_ref = PROVIDERS.catalog_api.get_v3_catalog(user_id, project_id) exp_catalog = [ - {'endpoints': [], - 'type': 'compute', - 'name': "'Compute Service'", - 'id': '2'}, - {'endpoints': [ - {'interface': 'admin', - 'region': 'RegionOne', - 'url': 'http://localhost:35357/v3'}, - {'interface': 'public', - 'region': 'RegionOne', - 'url': 'http://localhost:5000/v3'}, - {'interface': 'internal', - 'region': 'RegionOne', - 'url': 'http://localhost:35357/v3'}], - 'type': 'identity', - 'name': "'Identity Service'", - 'id': '1'}] + { + 'endpoints': [], + 'type': 'compute', + 'name': "'Compute Service'", + 'id': '2', + }, + { + 'endpoints': [ + { + 'interface': 'admin', + 'region': 'RegionOne', + 'url': 'http://localhost:35357/v3', + }, + { + 'interface': 'public', + 'region': 'RegionOne', + 'url': 'http://localhost:5000/v3', + }, + { + 'interface': 'internal', + 'region': 'RegionOne', + 'url': 'http://localhost:35357/v3', + }, + ], + 'type': 'identity', + 'name': "'Identity Service'", + 'id': '1', + }, + ] self.assert_catalogs_equal(exp_catalog, catalog_ref) def test_list_regions_filtered_by_parent_region_id(self): @@ -222,16 +290,21 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): hints = {} services = PROVIDERS.catalog_api.list_services(hints=hints) exp_services = [ - {'type': 'compute', - 'description': '', - 'enabled': True, - 'name': "'Compute Service'", - 'id': 'compute'}, - {'type': 'identity', - 'description': '', - 'enabled': True, - 'name': "'Identity Service'", - 'id': 'identity'}] + { + 'type': 'compute', + 'description': '', + 'enabled': True, + 'name': "'Compute Service'", + 'id': 'compute', + }, + { + 'type': 'identity', + 'description': '', + 'enabled': True, + 'name': "'Identity Service'", + 'id': 'identity', + }, + ] self.assertCountEqual(exp_services, services) # NOTE(dstanek): the following methods have been overridden @@ -263,8 +336,10 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): def 
test_avoid_creating_circular_references_in_regions_update(self): self.skip_test_overrides(BROKEN_WRITE_FUNCTIONALITY_MSG) - @mock.patch.object(catalog_base.CatalogDriverBase, - "_ensure_no_circle_in_hierarchical_regions") + @mock.patch.object( + catalog_base.CatalogDriverBase, + "_ensure_no_circle_in_hierarchical_regions", + ) def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle): self.skip_test_overrides(BROKEN_WRITE_FUNCTIONALITY_MSG) @@ -299,7 +374,8 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): def test_get_endpoint_returns_not_found(self): self.skip_test_overrides( - "Templated backend doesn't use IDs for endpoints.") + "Templated backend doesn't use IDs for endpoints." + ) def test_delete_endpoint_returns_not_found(self): self.skip_test_overrides(BROKEN_WRITE_FUNCTIONALITY_MSG) @@ -311,9 +387,13 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): self.skip_test_overrides(BROKEN_WRITE_FUNCTIONALITY_MSG) def test_list_endpoints(self): - expected_urls = set(['http://localhost:5000/v3', - 'http://localhost:35357/v3', - 'http://localhost:8774/v1.1/$(tenant_id)s']) + expected_urls = set( + [ + 'http://localhost:5000/v3', + 'http://localhost:35357/v3', + 'http://localhost:8774/v1.1/$(tenant_id)s', + ] + ) endpoints = PROVIDERS.catalog_api.list_endpoints() self.assertEqual(expected_urls, set(e['url'] for e in endpoints)) @@ -325,16 +405,15 @@ class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): # Deleting endpoint group association is not supported by the templated # driver, but it should be silent about it and not raise an error. PROVIDERS.catalog_api.delete_endpoint_group_association_by_project( - uuid.uuid4().hex) + uuid.uuid4().hex + ) def test_delete_association_by_endpoint(self): # Deleting endpoint association is not supported by the templated # driver, but it should be silent about it and not raise an error. 
- PROVIDERS.catalog_api.delete_association_by_endpoint( - uuid.uuid4().hex) + PROVIDERS.catalog_api.delete_association_by_endpoint(uuid.uuid4().hex) def test_delete_association_by_project(self): # Deleting endpoint association is not supported by the templated # driver, but it should be silent about it and not raise an error. - PROVIDERS.catalog_api.delete_association_by_project( - uuid.uuid4().hex) + PROVIDERS.catalog_api.delete_association_by_project(uuid.uuid4().hex) diff --git a/keystone/tests/unit/test_cli.py b/keystone/tests/unit/test_cli.py index e8f0883d54..f37bdf2c83 100644 --- a/keystone/tests/unit/test_cli.py +++ b/keystone/tests/unit/test_cli.py @@ -65,8 +65,11 @@ class CliLoggingTestCase(unit.BaseTestCase): def setUp(self): self.config_fixture = self.useFixture(oslo_config.fixture.Config(CONF)) self.config_fixture.register_cli_opt(cli.command_opt) - self.useFixture(fixtures.MockPatch( - 'oslo_config.cfg.find_config_files', return_value=[])) + self.useFixture( + fixtures.MockPatch( + 'oslo_config.cfg.find_config_files', return_value=[] + ) + ) fd = self.useFixture(temporaryfile.SecureTempFile()) self.fake_config_file = fd.file_name super(CliLoggingTestCase, self).setUp() @@ -76,8 +79,10 @@ class CliLoggingTestCase(unit.BaseTestCase): class FakeConfCommand(object): def __init__(self): self.cmd_class = mock.Mock() - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', FakeConfCommand())) + + self.useFixture( + fixtures.MockPatchObject(CONF, 'command', FakeConfCommand()) + ) self.logging = self.useFixture(fixtures.FakeLogger(level=log.WARN)) @@ -88,7 +93,10 @@ class CliLoggingTestCase(unit.BaseTestCase): def test_present_config_does_not_log_warning(self): fake_argv = [ - 'keystone-manage', '--config-file', self.fake_config_file, 'doctor' + 'keystone-manage', + '--config-file', + self.fake_config_file, + 'doctor', ] cli.main(argv=fake_argv) expected_msg = 'Config file not found, using default configs.' 
@@ -109,9 +117,11 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): return config_files def config(self, config_files): - CONF(args=['bootstrap', '--bootstrap-password', uuid.uuid4().hex], - project='keystone', - default_config_files=config_files) + CONF( + args=['bootstrap', '--bootstrap-password', uuid.uuid4().hex], + project='keystone', + default_config_files=config_files, + ) def test_bootstrap(self): self._do_test_bootstrap(self.bootstrap) @@ -120,26 +130,26 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): try: PROVIDERS.resource_api.create_domain( default_fixtures.ROOT_DOMAIN['id'], - default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN, + ) except exception.Conflict: pass bootstrap.do_bootstrap() project = PROVIDERS.resource_api.get_project_by_name( - bootstrap.project_name, - 'default') + bootstrap.project_name, 'default' + ) user = PROVIDERS.identity_api.get_user_by_name( - bootstrap.username, - 'default') + bootstrap.username, 'default' + ) admin_role = PROVIDERS.role_api.get_role(bootstrap.role_id) manager_role = PROVIDERS.role_api.get_role(bootstrap.manager_role_id) member_role = PROVIDERS.role_api.get_role(bootstrap.member_role_id) reader_role = PROVIDERS.role_api.get_role(bootstrap.reader_role_id) service_role = PROVIDERS.role_api.get_role(bootstrap.service_role_id) - role_list = ( - PROVIDERS.assignment_api.get_roles_for_user_and_project( - user['id'], - project['id'])) + role_list = PROVIDERS.assignment_api.get_roles_for_user_and_project( + user['id'], project['id'] + ) role_list_len = 5 if bootstrap.bootstrapper.project_name: @@ -154,19 +164,15 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): if not bootstrap.bootstrapper.project_name: self.assertIn(service_role['id'], role_list) - system_roles = ( - PROVIDERS.assignment_api.list_system_grants_for_user( - user['id'] - ) + system_roles = PROVIDERS.assignment_api.list_system_grants_for_user( + user['id'] ) self.assertIs(1, 
len(system_roles)) self.assertEqual(system_roles[0]['id'], admin_role['id']) # NOTE(morganfainberg): Pass an empty context, it isn't used by # `authenticate` method. with self.make_request(): - PROVIDERS.identity_api.authenticate( - user['id'], - bootstrap.password) + PROVIDERS.identity_api.authenticate(user['id'], bootstrap.password) if bootstrap.region_id: region = PROVIDERS.catalog_api.get_region(bootstrap.region_id) @@ -176,12 +182,15 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): svc = PROVIDERS.catalog_api.get_service(bootstrap.service_id) self.assertEqual(self.service_name, svc['name']) - self.assertEqual(set(['admin', 'public', 'internal']), - set(bootstrap.endpoints)) + self.assertEqual( + set(['admin', 'public', 'internal']), set(bootstrap.endpoints) + ) - urls = {'public': self.public_url, - 'internal': self.internal_url, - 'admin': self.admin_url} + urls = { + 'public': self.public_url, + 'internal': self.internal_url, + 'admin': self.admin_url, + } for interface, url in urls.items(): endpoint_id = bootstrap.endpoints[interface] @@ -205,35 +214,36 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): "user": { "name": self.bootstrap.username, "password": self.bootstrap.password, - "domain": { - "id": CONF.identity.default_domain_id - } + "domain": {"id": CONF.identity.default_domain_id}, } - } + }, } } } with app.test_client() as c: - auth_response = c.post('/v3/auth/tokens', - json=v3_password_data) + auth_response = c.post('/v3/auth/tokens', json=v3_password_data) token = auth_response.headers['X-Subject-Token'] self._do_test_bootstrap(self.bootstrap) # build validation request with app.test_client() as c: # Get a new X-Auth-Token - r = c.post( - '/v3/auth/tokens', - json=v3_password_data) + r = c.post('/v3/auth/tokens', json=v3_password_data) # Validate the old token with our new X-Auth-Token. 
- c.get('/v3/auth/tokens', - headers={'X-Auth-Token': r.headers['X-Subject-Token'], - 'X-Subject-Token': token}) + c.get( + '/v3/auth/tokens', + headers={ + 'X-Auth-Token': r.headers['X-Subject-Token'], + 'X-Subject-Token': token, + }, + ) admin_role = PROVIDERS.role_api.get_role(self.bootstrap.role_id) reader_role = PROVIDERS.role_api.get_role( - self.bootstrap.reader_role_id) + self.bootstrap.reader_role_id + ) member_role = PROVIDERS.role_api.get_role( - self.bootstrap.member_role_id) + self.bootstrap.member_role_id + ) self.assertEqual(admin_role['options'], {'immutable': True}) self.assertEqual(member_role['options'], {'immutable': True}) self.assertEqual(reader_role['options'], {'immutable': True}) @@ -251,25 +261,25 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): "user": { "name": self.bootstrap.username, "password": self.bootstrap.password, - "domain": { - "id": CONF.identity.default_domain_id - } + "domain": {"id": CONF.identity.default_domain_id}, } - } + }, } } } time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_time: with app.test_client() as c: - auth_response = c.post('/v3/auth/tokens', - json=v3_password_data) + auth_response = c.post( + '/v3/auth/tokens', json=v3_password_data + ) token = auth_response.headers['X-Subject-Token'] new_passwd = uuid.uuid4().hex os.environ['OS_BOOTSTRAP_PASSWORD'] = new_passwd self._do_test_bootstrap(self.bootstrap) v3_password_data['auth']['identity']['password']['user'][ - 'password'] = new_passwd + 'password' + ] = new_passwd # Move time forward a second to avoid rev. event capturing the new # auth-token since we're within a single second (possibly) for the # test case. @@ -284,22 +294,25 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): # the user's password was updated. Since this token was # obtained using the original password, it should now be # invalid. 
- c.get('/v3/auth/tokens', - headers={'X-Auth-Token': r.headers['X-Subject-Token'], - 'X-Subject-Token': token}, - expected_status_code=http.client.NOT_FOUND) + c.get( + '/v3/auth/tokens', + headers={ + 'X-Auth-Token': r.headers['X-Subject-Token'], + 'X-Subject-Token': token, + }, + expected_status_code=http.client.NOT_FOUND, + ) def test_bootstrap_recovers_user(self): self._do_test_bootstrap(self.bootstrap) # Completely lock the user out. user_id = PROVIDERS.identity_api.get_user_by_name( - self.bootstrap.username, - 'default')['id'] + self.bootstrap.username, 'default' + )['id'] PROVIDERS.identity_api.update_user( - user_id, - {'enabled': False, - 'password': uuid.uuid4().hex}) + user_id, {'enabled': False, 'password': uuid.uuid4().hex} + ) # The second bootstrap run will recover the account. self._do_test_bootstrap(self.bootstrap) @@ -307,49 +320,66 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): # Sanity check that the original password works again. with self.make_request(): PROVIDERS.identity_api.authenticate( - user_id, - self.bootstrap.password) + user_id, self.bootstrap.password + ) def test_bootstrap_with_explicit_immutable_roles(self): - CONF(args=['bootstrap', - '--bootstrap-password', uuid.uuid4().hex, - '--immutable-roles'], - project='keystone') + CONF( + args=[ + 'bootstrap', + '--bootstrap-password', + uuid.uuid4().hex, + '--immutable-roles', + ], + project='keystone', + ) self._do_test_bootstrap(self.bootstrap) admin_role = PROVIDERS.role_api.get_role(self.bootstrap.role_id) reader_role = PROVIDERS.role_api.get_role( - self.bootstrap.reader_role_id) + self.bootstrap.reader_role_id + ) member_role = PROVIDERS.role_api.get_role( - self.bootstrap.member_role_id) + self.bootstrap.member_role_id + ) self.assertTrue(admin_role['options']['immutable']) self.assertTrue(member_role['options']['immutable']) self.assertTrue(reader_role['options']['immutable']) def test_bootstrap_with_default_immutable_roles(self): - 
CONF(args=['bootstrap', - '--bootstrap-password', uuid.uuid4().hex], - project='keystone') + CONF( + args=['bootstrap', '--bootstrap-password', uuid.uuid4().hex], + project='keystone', + ) self._do_test_bootstrap(self.bootstrap) admin_role = PROVIDERS.role_api.get_role(self.bootstrap.role_id) reader_role = PROVIDERS.role_api.get_role( - self.bootstrap.reader_role_id) + self.bootstrap.reader_role_id + ) member_role = PROVIDERS.role_api.get_role( - self.bootstrap.member_role_id) + self.bootstrap.member_role_id + ) self.assertTrue(admin_role['options']['immutable']) self.assertTrue(member_role['options']['immutable']) self.assertTrue(reader_role['options']['immutable']) def test_bootstrap_with_no_immutable_roles(self): - CONF(args=['bootstrap', - '--bootstrap-password', uuid.uuid4().hex, - '--no-immutable-roles'], - project='keystone') + CONF( + args=[ + 'bootstrap', + '--bootstrap-password', + uuid.uuid4().hex, + '--no-immutable-roles', + ], + project='keystone', + ) self._do_test_bootstrap(self.bootstrap) admin_role = PROVIDERS.role_api.get_role(self.bootstrap.role_id) reader_role = PROVIDERS.role_api.get_role( - self.bootstrap.reader_role_id) + self.bootstrap.reader_role_id + ) member_role = PROVIDERS.role_api.get_role( - self.bootstrap.member_role_id) + self.bootstrap.member_role_id + ) self.assertNotIn('immutable', admin_role['options']) self.assertNotIn('immutable', member_role['options']) self.assertNotIn('immutable', reader_role['options']) @@ -368,7 +398,7 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): domain_role = { 'domain_id': domain['id'], 'id': uuid.uuid4().hex, - 'name': name + 'name': name, } domain_roles[name] = PROVIDERS.role_api.create_role( domain_role['id'], domain_role @@ -382,8 +412,11 @@ class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): class CliBootStrapTestCaseWithEnvironment(CliBootStrapTestCase): def config(self, config_files): - CONF(args=['bootstrap'], project='keystone', - 
default_config_files=config_files) + CONF( + args=['bootstrap'], + project='keystone', + default_config_files=config_files, + ) def setUp(self): super(CliBootStrapTestCaseWithEnvironment, self).setUp() @@ -401,94 +434,124 @@ class CliBootStrapTestCaseWithEnvironment(CliBootStrapTestCase): 'name': 'Default', } self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_PASSWORD', - newvalue=self.password)) + fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_PASSWORD', newvalue=self.password + ) + ) self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_USERNAME', - newvalue=self.username)) + fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_USERNAME', newvalue=self.username + ) + ) self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_PROJECT_NAME', - newvalue=self.project_name)) + fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_PROJECT_NAME', newvalue=self.project_name + ) + ) self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_ROLE_NAME', - newvalue=self.role_name)) + fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_ROLE_NAME', newvalue=self.role_name + ) + ) self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_SERVICE_NAME', - newvalue=self.service_name)) + fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_SERVICE_NAME', newvalue=self.service_name + ) + ) self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_PUBLIC_URL', - newvalue=self.public_url)) + fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_PUBLIC_URL', newvalue=self.public_url + ) + ) self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_INTERNAL_URL', - newvalue=self.internal_url)) + fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_INTERNAL_URL', newvalue=self.internal_url + ) + ) self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_ADMIN_URL', - newvalue=self.admin_url)) + fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_ADMIN_URL', newvalue=self.admin_url + ) + ) self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_REGION_ID', - newvalue=self.region_id)) + 
fixtures.EnvironmentVariable( + 'OS_BOOTSTRAP_REGION_ID', newvalue=self.region_id + ) + ) PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) def test_assignment_created_with_user_exists(self): # test assignment can be created if user already exists. - PROVIDERS.resource_api.create_domain(self.default_domain['id'], - self.default_domain) - user_ref = unit.new_user_ref(self.default_domain['id'], - name=self.username, - password=self.password) + PROVIDERS.resource_api.create_domain( + self.default_domain['id'], self.default_domain + ) + user_ref = unit.new_user_ref( + self.default_domain['id'], + name=self.username, + password=self.password, + ) PROVIDERS.identity_api.create_user(user_ref) self._do_test_bootstrap(self.bootstrap) def test_assignment_created_with_project_exists(self): # test assignment can be created if project already exists. - PROVIDERS.resource_api.create_domain(self.default_domain['id'], - self.default_domain) - project_ref = unit.new_project_ref(self.default_domain['id'], - name=self.project_name) + PROVIDERS.resource_api.create_domain( + self.default_domain['id'], self.default_domain + ) + project_ref = unit.new_project_ref( + self.default_domain['id'], name=self.project_name + ) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) self._do_test_bootstrap(self.bootstrap) def test_assignment_created_with_role_exists(self): # test assignment can be created if role already exists. - PROVIDERS.resource_api.create_domain(self.default_domain['id'], - self.default_domain) + PROVIDERS.resource_api.create_domain( + self.default_domain['id'], self.default_domain + ) role = unit.new_role_ref(name=self.role_name) PROVIDERS.role_api.create_role(role['id'], role) self._do_test_bootstrap(self.bootstrap) def test_assignment_created_with_region_exists(self): # test assignment can be created if region already exists. 
- PROVIDERS.resource_api.create_domain(self.default_domain['id'], - self.default_domain) + PROVIDERS.resource_api.create_domain( + self.default_domain['id'], self.default_domain + ) region = unit.new_region_ref(id=self.region_id) PROVIDERS.catalog_api.create_region(region) self._do_test_bootstrap(self.bootstrap) def test_endpoints_created_with_service_exists(self): # test assignment can be created if service already exists. - PROVIDERS.resource_api.create_domain(self.default_domain['id'], - self.default_domain) + PROVIDERS.resource_api.create_domain( + self.default_domain['id'], self.default_domain + ) service = unit.new_service_ref(name=self.service_name) PROVIDERS.catalog_api.create_service(service['id'], service) self._do_test_bootstrap(self.bootstrap) def test_endpoints_created_with_endpoint_exists(self): # test assignment can be created if endpoint already exists. - PROVIDERS.resource_api.create_domain(self.default_domain['id'], - self.default_domain) + PROVIDERS.resource_api.create_domain( + self.default_domain['id'], self.default_domain + ) service = unit.new_service_ref(name=self.service_name) PROVIDERS.catalog_api.create_service(service['id'], service) region = unit.new_region_ref(id=self.region_id) PROVIDERS.catalog_api.create_region(region) - endpoint = unit.new_endpoint_ref(interface='public', - service_id=service['id'], - url=self.public_url, - region_id=self.region_id) + endpoint = unit.new_endpoint_ref( + interface='public', + service_id=service['id'], + url=self.public_url, + region_id=self.region_id, + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) self._do_test_bootstrap(self.bootstrap) @@ -498,10 +561,12 @@ class CliBootStrapTestCaseWithEnvironment(CliBootStrapTestCase): PROVIDERS.catalog_api.create_service(service['id'], service) region = unit.new_region_ref(id=self.region_id) PROVIDERS.catalog_api.create_region(region) - endpoint = unit.new_endpoint_ref(interface='public', - service_id=service['id'], - url=uuid.uuid4().hex, - 
region_id=self.region_id) + endpoint = unit.new_endpoint_ref( + interface='public', + service_id=service['id'], + url=uuid.uuid4().hex, + region_id=self.region_id, + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) self._do_test_bootstrap(self.bootstrap) @@ -517,11 +582,11 @@ class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase): self.load_backends() self.config_fixture.config( group='identity', - domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap') + domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap', + ) self.domain_count = 3 self.setup_initial_domains() - self.logging = self.useFixture( - fixtures.FakeLogger(level=logging.INFO)) + self.logging = self.useFixture(fixtures.FakeLogger(level=logging.INFO)) def config_files(self): self.config_fixture.register_cli_opt(cli.command_opt) @@ -535,7 +600,8 @@ class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase): # Not allowed to delete the default domain, but should at least # delete any domain-specific config for it. 
PROVIDERS.domain_config_api.delete_config( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) continue this_domain = self.domains[domain] this_domain['enabled'] = False @@ -546,8 +612,11 @@ class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase): self.domains = {} def config(self, config_files): - CONF(args=['domain_config_upload', '--all'], project='keystone', - default_config_files=config_files) + CONF( + args=['domain_config_upload', '--all'], + project='keystone', + default_config_files=config_files, + ) def setup_initial_domains(self): @@ -555,46 +624,54 @@ class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase): return PROVIDERS.resource_api.create_domain(domain['id'], domain) PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) self.domains = {} self.addCleanup(self.cleanup_domains) for x in range(1, self.domain_count): domain = 'domain%s' % x self.domains[domain] = create_domain( - {'id': uuid.uuid4().hex, 'name': domain}) + {'id': uuid.uuid4().hex, 'name': domain} + ) self.default_domain = unit.new_domain_ref( description=u'The default domain', id=CONF.identity.default_domain_id, - name=u'Default') + name=u'Default', + ) self.domains['domain_default'] = create_domain(self.default_domain) def test_config_upload(self): # The values below are the same as in the domain_configs_multi_ldap # directory of test config_files. 
default_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap'} + 'ldap': { + 'url': 'fake://memory', + 'user': 'cn=Admin', + 'password': 'password', + 'suffix': 'cn=example,cn=com', + }, + 'identity': {'driver': 'ldap'}, } domain1_config = { - 'ldap': {'url': 'fake://memory1', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap', - 'list_limit': '101'} + 'ldap': { + 'url': 'fake://memory1', + 'user': 'cn=Admin', + 'password': 'password', + 'suffix': 'cn=example,cn=com', + }, + 'identity': {'driver': 'ldap', 'list_limit': '101'}, } domain2_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=myroot,cn=com', - 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org', - 'user_tree_dn': 'ou=Users,dc=myroot,dc=org'}, - 'identity': {'driver': 'ldap'} + 'ldap': { + 'url': 'fake://memory', + 'user': 'cn=Admin', + 'password': 'password', + 'suffix': 'cn=myroot,cn=com', + 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org', + 'user_tree_dn': 'ou=Users,dc=myroot,dc=org', + }, + 'identity': {'driver': 'ldap'}, } # Clear backend dependencies, since cli loads these manually @@ -602,31 +679,39 @@ class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase): cli.DomainConfigUpload.main() res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) self.assertEqual(default_config, res) res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domains['domain1']['id']) + self.domains['domain1']['id'] + ) self.assertEqual(domain1_config, res) res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domains['domain2']['id']) + self.domains['domain2']['id'] + ) self.assertEqual(domain2_config, res) class 
CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase): def config(self, config_files): - CONF(args=['domain_config_upload', '--domain-name', 'Default'], - project='keystone', default_config_files=config_files) + CONF( + args=['domain_config_upload', '--domain-name', 'Default'], + project='keystone', + default_config_files=config_files, + ) def test_config_upload(self): # The values below are the same as in the domain_configs_multi_ldap # directory of test config_files. default_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap'} + 'ldap': { + 'url': 'fake://memory', + 'user': 'cn=Admin', + 'password': 'password', + 'suffix': 'cn=example,cn=com', + }, + 'identity': {'driver': 'ldap'}, } # Clear backend dependencies, since cli loads these manually @@ -634,40 +719,48 @@ class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase): cli.DomainConfigUpload.main() res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) self.assertEqual(default_config, res) res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domains['domain1']['id']) + self.domains['domain1']['id'] + ) self.assertEqual({}, res) res = PROVIDERS.domain_config_api.get_config_with_sensitive_info( - self.domains['domain2']['id']) + self.domains['domain2']['id'] + ) self.assertEqual({}, res) def test_no_overwrite_config(self): # Create a config for the default domain default_config = { 'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': 'ldap'} + 'identity': {'driver': 'ldap'}, } PROVIDERS.domain_config_api.create_config( - CONF.identity.default_domain_id, default_config) + CONF.identity.default_domain_id, default_config + ) # Now try and upload the settings in the configuration file for the # default domain provider_api.ProviderAPIs._clear_registry_instances() with 
mock.patch('builtins.print') as mock_print: self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main) - file_name = ('keystone.%s.conf' % self.default_domain['name']) + file_name = 'keystone.%s.conf' % self.default_domain['name'] error_msg = _( 'Domain: %(domain)s already has a configuration defined - ' - 'ignoring file: %(file)s.') % { - 'domain': self.default_domain['name'], - 'file': os.path.join(CONF.identity.domain_config_dir, - file_name)} + 'ignoring file: %(file)s.' + ) % { + 'domain': self.default_domain['name'], + 'file': os.path.join( + CONF.identity.domain_config_dir, file_name + ), + } mock_print.assert_has_calls([mock.call(error_msg)]) res = PROVIDERS.domain_config_api.get_config( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) # The initial config should not have been overwritten self.assertEqual(default_config, res) @@ -675,54 +768,81 @@ class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase): class CliDomainConfigNoOptionsTestCase(CliDomainConfigAllTestCase): def config(self, config_files): - CONF(args=['domain_config_upload'], - project='keystone', default_config_files=config_files) + CONF( + args=['domain_config_upload'], + project='keystone', + default_config_files=config_files, + ) def test_config_upload(self): provider_api.ProviderAPIs._clear_registry_instances() with mock.patch('builtins.print') as mock_print: self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main) mock_print.assert_has_calls( - [mock.call( - _('At least one option must be provided, use either ' - '--all or --domain-name'))]) + [ + mock.call( + _( + 'At least one option must be provided, use either ' + '--all or --domain-name' + ) + ) + ] + ) class CliDomainConfigTooManyOptionsTestCase(CliDomainConfigAllTestCase): def config(self, config_files): - CONF(args=['domain_config_upload', '--all', '--domain-name', - 'Default'], - project='keystone', default_config_files=config_files) + CONF( + args=['domain_config_upload', 
'--all', '--domain-name', 'Default'], + project='keystone', + default_config_files=config_files, + ) def test_config_upload(self): provider_api.ProviderAPIs._clear_registry_instances() with mock.patch('builtins.print') as mock_print: self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main) mock_print.assert_has_calls( - [mock.call(_('The --all option cannot be used with ' - 'the --domain-name option'))]) + [ + mock.call( + _( + 'The --all option cannot be used with ' + 'the --domain-name option' + ) + ) + ] + ) class CliDomainConfigInvalidDomainTestCase(CliDomainConfigAllTestCase): def config(self, config_files): self.invalid_domain_name = uuid.uuid4().hex - CONF(args=['domain_config_upload', '--domain-name', - self.invalid_domain_name], - project='keystone', default_config_files=config_files) + CONF( + args=[ + 'domain_config_upload', + '--domain-name', + self.invalid_domain_name, + ], + project='keystone', + default_config_files=config_files, + ) def test_config_upload(self): provider_api.ProviderAPIs._clear_registry_instances() with mock.patch('builtins.print') as mock_print: self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main) file_name = 'keystone.%s.conf' % self.invalid_domain_name - error_msg = (_( + error_msg = _( 'Invalid domain name: %(domain)s found in config file name: ' - '%(file)s - ignoring this file.') % { - 'domain': self.invalid_domain_name, - 'file': os.path.join(CONF.identity.domain_config_dir, - file_name)}) + '%(file)s - ignoring this file.' 
+ ) % { + 'domain': self.invalid_domain_name, + 'file': os.path.join( + CONF.identity.domain_config_dir, file_name + ), + } mock_print.assert_has_calls([mock.call(error_msg)]) @@ -741,17 +861,21 @@ class TestDomainConfigFinder(unit.BaseTestCase): domain_configs = list(cli._domain_config_finder('.')) expected_domain_configs = [('./keystone.domain0.conf', 'domain0')] - self.assertThat(domain_configs, - matchers.Equals(expected_domain_configs)) + self.assertThat( + domain_configs, matchers.Equals(expected_domain_configs) + ) - expected_msg_template = ('Ignoring file (%s) while scanning ' - 'domain config directory') + expected_msg_template = ( + 'Ignoring file (%s) while scanning ' 'domain config directory' + ) self.assertThat( self.logging.output, - matchers.Contains(expected_msg_template % 'file.txt')) + matchers.Contains(expected_msg_template % 'file.txt'), + ) self.assertThat( self.logging.output, - matchers.Contains(expected_msg_template % 'keystone.conf')) + matchers.Contains(expected_msg_template % 'keystone.conf'), + ) class CliDBSyncTestCase(unit.BaseTestCase): @@ -789,40 +913,53 @@ class CliDBSyncTestCase(unit.BaseTestCase): super().tearDown() def _assert_correct_call(self, mocked_function): - for func in [upgrades.offline_sync_database_to_version, - upgrades.expand_schema, - upgrades.migrate_data, - upgrades.contract_schema]: + for func in [ + upgrades.offline_sync_database_to_version, + upgrades.expand_schema, + upgrades.migrate_data, + upgrades.contract_schema, + ]: if func == mocked_function: self.assertTrue(func.called) else: self.assertFalse(func.called) def test_db_sync(self): - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) cli.DbSync.main() - self._assert_correct_call( - upgrades.offline_sync_database_to_version) + self._assert_correct_call(upgrades.offline_sync_database_to_version) def 
test_db_sync_expand(self): self.command_expand = True - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) cli.DbSync.main() self._assert_correct_call(upgrades.expand_schema) def test_db_sync_migrate(self): self.command_migrate = True - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) cli.DbSync.main() self._assert_correct_call(upgrades.migrate_data) def test_db_sync_contract(self): self.command_contract = True - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) cli.DbSync.main() self._assert_correct_call(upgrades.contract_schema) @@ -849,13 +986,16 @@ class TestMappingPopulate(unit.SQLDriverOverrides, unit.TestCase): def config_overrides(self): super(TestMappingPopulate, self).config_overrides() self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=False + ) def config(self, config_files): - CONF(args=['mapping_populate', '--domain-name', 'Default'], - project='keystone', - default_config_files=config_files) + CONF( + args=['mapping_populate', '--domain-name', 'Default'], + project='keystone', + default_config_files=config_files, + ) def test_mapping_populate(self): # mapping_populate should create id mappings. 
Test plan: @@ -873,7 +1013,8 @@ class TestMappingPopulate(unit.SQLDriverOverrides, unit.TestCase): local_entity = { 'domain_id': CONF.identity.default_domain_id, 'local_id': user['id'], - 'entity_type': identity_mapping.EntityType.USER} + 'entity_type': identity_mapping.EntityType.USER, + } self.assertIsNone( PROVIDERS.id_mapping_api.get_public_id(local_entity) ) @@ -886,13 +1027,17 @@ class TestMappingPopulate(unit.SQLDriverOverrides, unit.TestCase): local_entity = { 'domain_id': CONF.identity.default_domain_id, 'local_id': user['id'], - 'entity_type': identity_mapping.EntityType.USER} + 'entity_type': identity_mapping.EntityType.USER, + } self.assertIsNotNone( - PROVIDERS.id_mapping_api.get_public_id(local_entity)) + PROVIDERS.id_mapping_api.get_public_id(local_entity) + ) def test_bad_domain_name(self): - CONF(args=['mapping_populate', '--domain-name', uuid.uuid4().hex], - project='keystone') + CONF( + args=['mapping_populate', '--domain-name', uuid.uuid4().hex], + project='keystone', + ) # backends are loaded again in the command handler provider_api.ProviderAPIs._clear_registry_instances() # NOTE: assertEqual is used on purpose. assertFalse passes with None. @@ -912,24 +1057,35 @@ class CliDomainConfigUploadNothing(unit.BaseTestCase): # setup a test database. 
def fake_load_backends(self): self.resource_manager = mock.Mock() - self.useFixture(fixtures.MockPatchObject( - cli.DomainConfigUploadFiles, 'load_backends', fake_load_backends)) + + self.useFixture( + fixtures.MockPatchObject( + cli.DomainConfigUploadFiles, + 'load_backends', + fake_load_backends, + ) + ) tempdir = self.useFixture(fixtures.TempDir()) config_fixture.config(group='identity', domain_config_dir=tempdir.path) self.logging = self.useFixture( - fixtures.FakeLogger(level=logging.DEBUG)) + fixtures.FakeLogger(level=logging.DEBUG) + ) def test_uploading_all_from_an_empty_directory(self): - CONF(args=['domain_config_upload', '--all'], project='keystone', - default_config_files=[]) + CONF( + args=['domain_config_upload', '--all'], + project='keystone', + default_config_files=[], + ) cli.DomainConfigUpload.main() - expected_msg = ('No domain configs uploaded from %r' % - CONF.identity.domain_config_dir) - self.assertThat(self.logging.output, - matchers.Contains(expected_msg)) + expected_msg = ( + 'No domain configs uploaded from %r' + % CONF.identity.domain_config_dir + ) + self.assertThat(self.logging.output, matchers.Contains(expected_msg)) class CachingDoctorTests(unit.TestCase): @@ -956,14 +1112,16 @@ class CachingDoctorTests(unit.TestCase): # Failure Case 2: Caching disabled and backend configured self.config_fixture.config(group='cache', enabled=False) - self.config_fixture.config(group='cache', - backend='dogpile.cache.memory') + self.config_fixture.config( + group='cache', backend='dogpile.cache.memory' + ) self.assertFalse(caching.symptom_caching_enabled_without_a_backend()) # Failure Case 3: Caching enabled and backend configured self.config_fixture.config(group='cache', enabled=True) - self.config_fixture.config(group='cache', - backend='dogpile.cache.memory') + self.config_fixture.config( + group='cache', backend='dogpile.cache.memory' + ) self.assertFalse(caching.symptom_caching_enabled_without_a_backend()) 
@mock.patch('keystone.cmd.doctor.caching.cache.CACHE_REGION') @@ -971,7 +1129,7 @@ class CachingDoctorTests(unit.TestCase): self.config_fixture.config(group='cache', enabled=True) self.config_fixture.config( group='cache', - memcache_servers=['alpha.com:11211', 'beta.com:11211'] + memcache_servers=['alpha.com:11211', 'beta.com:11211'], ) self.config_fixture.config( group='cache', backend='dogpile.cache.memcached' @@ -979,9 +1137,10 @@ class CachingDoctorTests(unit.TestCase): # No symptom detected: Caching driver can connect to both memcached # servers - cache_mock.actual_backend.client.get_stats.return_value = ( - [('alpha.com', {}), ('beta.com', {})] - ) + cache_mock.actual_backend.client.get_stats.return_value = [ + ('alpha.com', {}), + ('beta.com', {}), + ] self.assertFalse(caching.symptom_connection_to_memcached()) # Symptom detected: Caching driver can't connect to either memcached @@ -998,7 +1157,7 @@ class CachingDoctorTests(unit.TestCase): self.config_fixture.config( group='cache', - memcache_servers=['alpha.com:11211', 'beta.com:11211'] + memcache_servers=['alpha.com:11211', 'beta.com:11211'], ) self.config_fixture.config( group='cache', backend='oslo_cache.memcache_pool' @@ -1006,9 +1165,10 @@ class CachingDoctorTests(unit.TestCase): # No symptom detected: Caching driver can connect to both memcached # servers - cache_mock.actual_backend.client.get_stats.return_value = ( - [('alpha.com', {}), ('beta.com', {})] - ) + cache_mock.actual_backend.client.get_stats.return_value = [ + ('alpha.com', {}), + ('beta.com', {}), + ] self.assertFalse(caching.symptom_connection_to_memcached()) # Symptom detected: Caching driver can't connect to either memcached @@ -1029,18 +1189,22 @@ class CredentialDoctorTests(unit.TestCase): def test_credential_and_fernet_key_repositories_match(self): # Symptom Detected: Key repository paths are not unique directory = self.useFixture(fixtures.TempDir()).path - self.config_fixture.config(group='credential', - key_repository=directory) 
- self.config_fixture.config(group='fernet_tokens', - key_repository=directory) + self.config_fixture.config( + group='credential', key_repository=directory + ) + self.config_fixture.config( + group='fernet_tokens', key_repository=directory + ) self.assertTrue(credential.symptom_unique_key_repositories()) def test_credential_and_fernet_key_repositories_are_unique(self): # No Symptom Detected: Key repository paths are unique - self.config_fixture.config(group='credential', - key_repository='/etc/keystone/cred-repo') - self.config_fixture.config(group='fernet_tokens', - key_repository='/etc/keystone/fernet-repo') + self.config_fixture.config( + group='credential', key_repository='/etc/keystone/cred-repo' + ) + self.config_fixture.config( + group='fernet_tokens', key_repository='/etc/keystone/fernet-repo' + ) self.assertFalse(credential.symptom_unique_key_repositories()) @mock.patch('keystone.cmd.doctor.credential.utils') @@ -1049,7 +1213,8 @@ class CredentialDoctorTests(unit.TestCase): self.config_fixture.config(group='credential', provider='fernet') mock_utils.FernetUtils().validate_key_repository.return_value = False self.assertTrue( - credential.symptom_usability_of_credential_fernet_key_repository()) + credential.symptom_usability_of_credential_fernet_key_repository() + ) @mock.patch('keystone.cmd.doctor.credential.utils') def test_usability_of_cred_fernet_key_repo_not_raised(self, mock_utils): @@ -1057,13 +1222,15 @@ class CredentialDoctorTests(unit.TestCase): self.config_fixture.config(group='credential', provider='my-driver') mock_utils.FernetUtils().validate_key_repository.return_value = True self.assertFalse( - credential.symptom_usability_of_credential_fernet_key_repository()) + credential.symptom_usability_of_credential_fernet_key_repository() + ) # No Symptom Detected: key repository is not world readable self.config_fixture.config(group='credential', provider='fernet') mock_utils.FernetUtils().validate_key_repository.return_value = True self.assertFalse( 
- credential.symptom_usability_of_credential_fernet_key_repository()) + credential.symptom_usability_of_credential_fernet_key_repository() + ) @mock.patch('keystone.cmd.doctor.credential.utils') def test_keys_in_credential_fernet_key_repository_raised(self, mock_utils): @@ -1071,22 +1238,26 @@ class CredentialDoctorTests(unit.TestCase): self.config_fixture.config(group='credential', provider='fernet') mock_utils.FernetUtils().load_keys.return_value = False self.assertTrue( - credential.symptom_keys_in_credential_fernet_key_repository()) + credential.symptom_keys_in_credential_fernet_key_repository() + ) @mock.patch('keystone.cmd.doctor.credential.utils') def test_keys_in_credential_fernet_key_repository_not_raised( - self, mock_utils): + self, mock_utils + ): # No Symptom Detected: Custom driver is used self.config_fixture.config(group='credential', provider='my-driver') mock_utils.FernetUtils().load_keys.return_value = True self.assertFalse( - credential.symptom_keys_in_credential_fernet_key_repository()) + credential.symptom_keys_in_credential_fernet_key_repository() + ) # No Symptom Detected: Key repo is not empty, fernet is current driver self.config_fixture.config(group='credential', provider='fernet') mock_utils.FernetUtils().load_keys.return_value = True self.assertFalse( - credential.symptom_keys_in_credential_fernet_key_repository()) + credential.symptom_keys_in_credential_fernet_key_repository() + ) class DatabaseDoctorTests(unit.TestCase): @@ -1094,17 +1265,20 @@ class DatabaseDoctorTests(unit.TestCase): def test_symptom_is_raised_if_database_connection_is_SQLite(self): # Symptom Detected: Database connection is sqlite self.config_fixture.config( - group='database', - connection='sqlite:///mydb') + group='database', connection='sqlite:///mydb' + ) self.assertTrue( - doc_database.symptom_database_connection_is_not_SQLite()) + doc_database.symptom_database_connection_is_not_SQLite() + ) # No Symptom Detected: Database connection is MySQL 
self.config_fixture.config( group='database', - connection='mysql+mysqlconnector://admin:secret@localhost/mydb') + connection='mysql+mysqlconnector://admin:secret@localhost/mydb', + ) self.assertFalse( - doc_database.symptom_database_connection_is_not_SQLite()) + doc_database.symptom_database_connection_is_not_SQLite() + ) class DebugDoctorTests(unit.TestCase): @@ -1125,23 +1299,27 @@ class FederationDoctorTests(unit.TestCase): # Symptom Detected: There is a comma in path to public cert file self.config_fixture.config(group='saml', certfile='file,cert.pem') self.assertTrue( - federation.symptom_comma_in_SAML_public_certificate_path()) + federation.symptom_comma_in_SAML_public_certificate_path() + ) # No Symptom Detected: There is no comma in the path self.config_fixture.config(group='saml', certfile='signing_cert.pem') self.assertFalse( - federation.symptom_comma_in_SAML_public_certificate_path()) + federation.symptom_comma_in_SAML_public_certificate_path() + ) def test_symptom_comma_in_SAML_private_key_file_path(self): # Symptom Detected: There is a comma in path to private key file self.config_fixture.config(group='saml', keyfile='file,key.pem') self.assertTrue( - federation.symptom_comma_in_SAML_private_key_file_path()) + federation.symptom_comma_in_SAML_private_key_file_path() + ) # No Symptom Detected: There is no comma in the path self.config_fixture.config(group='saml', keyfile='signing_key.pem') self.assertFalse( - federation.symptom_comma_in_SAML_private_key_file_path()) + federation.symptom_comma_in_SAML_private_key_file_path() + ) class LdapDoctorTests(unit.TestCase): @@ -1152,87 +1330,88 @@ class LdapDoctorTests(unit.TestCase): self.config_fixture.config(group='ldap', user_enabled_emulation=False) self.config_fixture.config( group='ldap', - user_enabled_emulation_dn='cn=enabled_users,dc=example,dc=com') - self.assertTrue( - ldap.symptom_LDAP_user_enabled_emulation_dn_ignored()) + user_enabled_emulation_dn='cn=enabled_users,dc=example,dc=com', + ) + 
self.assertTrue(ldap.symptom_LDAP_user_enabled_emulation_dn_ignored()) def test_user_enabled_emulation_dn_ignored_not_raised(self): # No symptom when configuration set properly self.config_fixture.config(group='ldap', user_enabled_emulation=True) self.config_fixture.config( group='ldap', - user_enabled_emulation_dn='cn=enabled_users,dc=example,dc=com') - self.assertFalse( - ldap.symptom_LDAP_user_enabled_emulation_dn_ignored()) + user_enabled_emulation_dn='cn=enabled_users,dc=example,dc=com', + ) + self.assertFalse(ldap.symptom_LDAP_user_enabled_emulation_dn_ignored()) # No symptom when both configurations disabled self.config_fixture.config(group='ldap', user_enabled_emulation=False) - self.config_fixture.config(group='ldap', - user_enabled_emulation_dn=None) - self.assertFalse( - ldap.symptom_LDAP_user_enabled_emulation_dn_ignored()) + self.config_fixture.config( + group='ldap', user_enabled_emulation_dn=None + ) + self.assertFalse(ldap.symptom_LDAP_user_enabled_emulation_dn_ignored()) def test_user_enabled_emulation_use_group_config_ignored_raised(self): # Symptom when user enabled emulation isn't enabled but group_config is # enabled self.config_fixture.config(group='ldap', user_enabled_emulation=False) self.config_fixture.config( - group='ldap', - user_enabled_emulation_use_group_config=True) + group='ldap', user_enabled_emulation_use_group_config=True + ) self.assertTrue( - ldap. - symptom_LDAP_user_enabled_emulation_use_group_config_ignored()) + ldap.symptom_LDAP_user_enabled_emulation_use_group_config_ignored() + ) def test_user_enabled_emulation_use_group_config_ignored_not_raised(self): # No symptom when configuration deactivated self.config_fixture.config(group='ldap', user_enabled_emulation=False) self.config_fixture.config( - group='ldap', - user_enabled_emulation_use_group_config=False) + group='ldap', user_enabled_emulation_use_group_config=False + ) self.assertFalse( - ldap. 
- symptom_LDAP_user_enabled_emulation_use_group_config_ignored()) + ldap.symptom_LDAP_user_enabled_emulation_use_group_config_ignored() + ) # No symptom when configurations set properly self.config_fixture.config(group='ldap', user_enabled_emulation=True) self.config_fixture.config( - group='ldap', - user_enabled_emulation_use_group_config=True) + group='ldap', user_enabled_emulation_use_group_config=True + ) self.assertFalse( - ldap. - symptom_LDAP_user_enabled_emulation_use_group_config_ignored()) + ldap.symptom_LDAP_user_enabled_emulation_use_group_config_ignored() + ) def test_group_members_are_ids_disabled_raised(self): # Symptom when objectclass is set to posixGroup but members_are_ids are # not enabled - self.config_fixture.config(group='ldap', - group_objectclass='posixGroup') - self.config_fixture.config(group='ldap', - group_members_are_ids=False) + self.config_fixture.config( + group='ldap', group_objectclass='posixGroup' + ) + self.config_fixture.config(group='ldap', group_members_are_ids=False) self.assertTrue(ldap.symptom_LDAP_group_members_are_ids_disabled()) def test_group_members_are_ids_disabled_not_raised(self): # No symptom when the configurations are set properly - self.config_fixture.config(group='ldap', - group_objectclass='posixGroup') - self.config_fixture.config(group='ldap', - group_members_are_ids=True) + self.config_fixture.config( + group='ldap', group_objectclass='posixGroup' + ) + self.config_fixture.config(group='ldap', group_members_are_ids=True) self.assertFalse(ldap.symptom_LDAP_group_members_are_ids_disabled()) # No symptom when configuration deactivated - self.config_fixture.config(group='ldap', - group_objectclass='groupOfNames') - self.config_fixture.config(group='ldap', - group_members_are_ids=False) + self.config_fixture.config( + group='ldap', group_objectclass='groupOfNames' + ) + self.config_fixture.config(group='ldap', group_members_are_ids=False) self.assertFalse(ldap.symptom_LDAP_group_members_are_ids_disabled()) 
@mock.patch('os.listdir') @mock.patch('os.path.isdir') - def test_file_based_domain_specific_configs_raised(self, mocked_isdir, - mocked_listdir): + def test_file_based_domain_specific_configs_raised( + self, mocked_isdir, mocked_listdir + ): self.config_fixture.config( - group='identity', - domain_specific_drivers_enabled=True) + group='identity', domain_specific_drivers_enabled=True + ) self.config_fixture.config( - group='identity', - domain_configurations_from_database=False) + group='identity', domain_configurations_from_database=False + ) # Symptom if there is no existing directory mocked_isdir.return_value = False @@ -1245,45 +1424,51 @@ class LdapDoctorTests(unit.TestCase): @mock.patch('os.listdir') @mock.patch('os.path.isdir') - def test_file_based_domain_specific_configs_not_raised(self, mocked_isdir, - mocked_listdir): + def test_file_based_domain_specific_configs_not_raised( + self, mocked_isdir, mocked_listdir + ): # No symptom if both configurations deactivated self.config_fixture.config( - group='identity', - domain_specific_drivers_enabled=False) + group='identity', domain_specific_drivers_enabled=False + ) self.config_fixture.config( - group='identity', - domain_configurations_from_database=False) + group='identity', domain_configurations_from_database=False + ) self.assertFalse( - ldap.symptom_LDAP_file_based_domain_specific_configs()) + ldap.symptom_LDAP_file_based_domain_specific_configs() + ) # No symptom if directory exists with no invalid filenames self.config_fixture.config( - group='identity', - domain_specific_drivers_enabled=True) + group='identity', domain_specific_drivers_enabled=True + ) self.config_fixture.config( - group='identity', - domain_configurations_from_database=False) + group='identity', domain_configurations_from_database=False + ) mocked_isdir.return_value = True mocked_listdir.return_value = ['keystone.domains.conf'] self.assertFalse( - ldap.symptom_LDAP_file_based_domain_specific_configs()) + 
ldap.symptom_LDAP_file_based_domain_specific_configs() + ) @mock.patch('os.listdir') @mock.patch('os.path.isdir') @mock.patch('keystone.cmd.doctor.ldap.configparser.ConfigParser') def test_file_based_domain_specific_configs_formatted_correctly_raised( - self, mocked_parser, mocked_isdir, mocked_listdir): - symptom = ('symptom_LDAP_file_based_domain_specific_configs' - '_formatted_correctly') + self, mocked_parser, mocked_isdir, mocked_listdir + ): + symptom = ( + 'symptom_LDAP_file_based_domain_specific_configs' + '_formatted_correctly' + ) # Symptom Detected: Ldap domain specific configuration files are not # formatted correctly self.config_fixture.config( - group='identity', - domain_specific_drivers_enabled=True) + group='identity', domain_specific_drivers_enabled=True + ) self.config_fixture.config( - group='identity', - domain_configurations_from_database=False) + group='identity', domain_configurations_from_database=False + ) mocked_isdir.return_value = True mocked_listdir.return_value = ['keystone.domains.conf'] @@ -1296,23 +1481,26 @@ class LdapDoctorTests(unit.TestCase): @mock.patch('os.listdir') @mock.patch('os.path.isdir') def test_file_based_domain_specific_configs_formatted_correctly_not_raised( - self, mocked_isdir, mocked_listdir): - symptom = ('symptom_LDAP_file_based_domain_specific_configs' - '_formatted_correctly') + self, mocked_isdir, mocked_listdir + ): + symptom = ( + 'symptom_LDAP_file_based_domain_specific_configs' + '_formatted_correctly' + ) # No Symptom Detected: Domain_specific drivers is not enabled self.config_fixture.config( - group='identity', - domain_specific_drivers_enabled=False) + group='identity', domain_specific_drivers_enabled=False + ) self.assertFalse(getattr(ldap, symptom)()) # No Symptom Detected: Domain configuration from database is enabled self.config_fixture.config( - group='identity', - domain_specific_drivers_enabled=True) + group='identity', domain_specific_drivers_enabled=True + ) self.assertFalse(getattr(ldap, 
symptom)()) self.config_fixture.config( - group='identity', - domain_configurations_from_database=True) + group='identity', domain_configurations_from_database=True + ) self.assertFalse(getattr(ldap, symptom)()) # No Symptom Detected: The directory in domain_config_dir doesn't exist @@ -1323,8 +1511,8 @@ class LdapDoctorTests(unit.TestCase): # configurations from database are disabled, directory exists, and no # exceptions found. self.config_fixture.config( - group='identity', - domain_configurations_from_database=False) + group='identity', domain_configurations_from_database=False + ) mocked_isdir.return_value = True # An empty directory should not raise this symptom self.assertFalse(getattr(ldap, symptom)()) @@ -1339,102 +1527,116 @@ class SecurityComplianceDoctorTests(unit.TestCase): def test_minimum_password_age_greater_than_password_expires_days(self): # Symptom Detected: Minimum password age is greater than the password # expires days. Both values are positive integers greater than zero. - self.config_fixture.config(group='security_compliance', - minimum_password_age=2) - self.config_fixture.config(group='security_compliance', - password_expires_days=1) + self.config_fixture.config( + group='security_compliance', minimum_password_age=2 + ) + self.config_fixture.config( + group='security_compliance', password_expires_days=1 + ) self.assertTrue( - security_compliance. - symptom_minimum_password_age_greater_than_expires_days()) + security_compliance.symptom_minimum_password_age_greater_than_expires_days() + ) def test_minimum_password_age_equal_to_password_expires_days(self): # Symptom Detected: Minimum password age is equal to the password # expires days. Both values are positive integers greater than zero. 
- self.config_fixture.config(group='security_compliance', - minimum_password_age=1) - self.config_fixture.config(group='security_compliance', - password_expires_days=1) + self.config_fixture.config( + group='security_compliance', minimum_password_age=1 + ) + self.config_fixture.config( + group='security_compliance', password_expires_days=1 + ) self.assertTrue( - security_compliance. - symptom_minimum_password_age_greater_than_expires_days()) + security_compliance.symptom_minimum_password_age_greater_than_expires_days() + ) def test_minimum_password_age_less_than_password_expires_days(self): # No Symptom Detected: Minimum password age is less than password # expires days. Both values are positive integers greater than zero. - self.config_fixture.config(group='security_compliance', - minimum_password_age=1) - self.config_fixture.config(group='security_compliance', - password_expires_days=2) + self.config_fixture.config( + group='security_compliance', minimum_password_age=1 + ) + self.config_fixture.config( + group='security_compliance', password_expires_days=2 + ) self.assertFalse( - security_compliance. - symptom_minimum_password_age_greater_than_expires_days()) + security_compliance.symptom_minimum_password_age_greater_than_expires_days() + ) def test_minimum_password_age_and_password_expires_days_deactivated(self): # No Symptom Detected: when minimum_password_age's default value is 0 # and password_expires_days' default value is None self.assertFalse( - security_compliance. 
- symptom_minimum_password_age_greater_than_expires_days()) + security_compliance.symptom_minimum_password_age_greater_than_expires_days() + ) def test_invalid_password_regular_expression(self): # Symptom Detected: Regular expression is invalid self.config_fixture.config( - group='security_compliance', - password_regex=r'^^(??=.*\d)$') + group='security_compliance', password_regex=r'^^(??=.*\d)$' + ) self.assertTrue( - security_compliance.symptom_invalid_password_regular_expression()) + security_compliance.symptom_invalid_password_regular_expression() + ) def test_valid_password_regular_expression(self): # No Symptom Detected: Regular expression is valid self.config_fixture.config( group='security_compliance', - password_regex=r'^(?=.*\d)(?=.*[a-zA-Z]).{7,}$') + password_regex=r'^(?=.*\d)(?=.*[a-zA-Z]).{7,}$', + ) self.assertFalse( - security_compliance.symptom_invalid_password_regular_expression()) + security_compliance.symptom_invalid_password_regular_expression() + ) def test_password_regular_expression_deactivated(self): # No Symptom Detected: Regular expression deactivated to None self.config_fixture.config( - group='security_compliance', - password_regex=None) + group='security_compliance', password_regex=None + ) self.assertFalse( - security_compliance.symptom_invalid_password_regular_expression()) + security_compliance.symptom_invalid_password_regular_expression() + ) def test_password_regular_expression_description_not_set(self): # Symptom Detected: Regular expression is set but description is not self.config_fixture.config( group='security_compliance', - password_regex=r'^(?=.*\d)(?=.*[a-zA-Z]).{7,}$') + password_regex=r'^(?=.*\d)(?=.*[a-zA-Z]).{7,}$', + ) self.config_fixture.config( - group='security_compliance', - password_regex_description=None) + group='security_compliance', password_regex_description=None + ) self.assertTrue( - security_compliance. 
- symptom_password_regular_expression_description_not_set()) + security_compliance.symptom_password_regular_expression_description_not_set() + ) def test_password_regular_expression_description_set(self): # No Symptom Detected: Regular expression and description are set desc = '1 letter, 1 digit, and a minimum length of 7 is required' self.config_fixture.config( group='security_compliance', - password_regex=r'^(?=.*\d)(?=.*[a-zA-Z]).{7,}$') + password_regex=r'^(?=.*\d)(?=.*[a-zA-Z]).{7,}$', + ) self.config_fixture.config( - group='security_compliance', - password_regex_description=desc) + group='security_compliance', password_regex_description=desc + ) self.assertFalse( - security_compliance. - symptom_password_regular_expression_description_not_set()) + security_compliance.symptom_password_regular_expression_description_not_set() + ) def test_password_regular_expression_description_deactivated(self): # No Symptom Detected: Regular expression and description are # deactivated to None self.config_fixture.config( - group='security_compliance', password_regex=None) + group='security_compliance', password_regex=None + ) self.config_fixture.config( - group='security_compliance', password_regex_description=None) + group='security_compliance', password_regex_description=None + ) self.assertFalse( - security_compliance. 
- symptom_password_regular_expression_description_not_set()) + security_compliance.symptom_password_regular_expression_description_not_set() + ) class TokensDoctorTests(unit.TestCase): @@ -1465,7 +1667,8 @@ class TokenFernetDoctorTests(unit.TestCase): self.config_fixture.config(group='token', provider='fernet') mock_utils.FernetUtils().validate_key_repository.return_value = False self.assertTrue( - tokens_fernet.symptom_usability_of_Fernet_key_repository()) + tokens_fernet.symptom_usability_of_Fernet_key_repository() + ) @mock.patch('keystone.cmd.doctor.tokens_fernet.utils') def test_usability_of_Fernet_key_repository_not_raised(self, mock_utils): @@ -1473,22 +1676,23 @@ class TokenFernetDoctorTests(unit.TestCase): self.config_fixture.config(group='token', provider='uuid') mock_utils.FernetUtils().validate_key_repository.return_value = False self.assertFalse( - tokens_fernet.symptom_usability_of_Fernet_key_repository()) + tokens_fernet.symptom_usability_of_Fernet_key_repository() + ) # No Symptom Detected: configs set properly, key repo is not world # readable but is user readable self.config_fixture.config(group='token', provider='fernet') mock_utils.FernetUtils().validate_key_repository.return_value = True self.assertFalse( - tokens_fernet.symptom_usability_of_Fernet_key_repository()) + tokens_fernet.symptom_usability_of_Fernet_key_repository() + ) @mock.patch('keystone.cmd.doctor.tokens_fernet.utils') def test_keys_in_Fernet_key_repository_raised(self, mock_utils): # Symptom Detected: Fernet key repository is empty self.config_fixture.config(group='token', provider='fernet') mock_utils.FernetUtils().load_keys.return_value = False - self.assertTrue( - tokens_fernet.symptom_keys_in_Fernet_key_repository()) + self.assertTrue(tokens_fernet.symptom_keys_in_Fernet_key_repository()) @mock.patch('keystone.cmd.doctor.tokens_fernet.utils') def test_keys_in_Fernet_key_repository_not_raised(self, mock_utils): @@ -1496,14 +1700,16 @@ class 
TokenFernetDoctorTests(unit.TestCase): self.config_fixture.config(group='token', provider='uuid') mock_utils.FernetUtils().load_keys.return_value = True self.assertFalse( - tokens_fernet.symptom_usability_of_Fernet_key_repository()) + tokens_fernet.symptom_usability_of_Fernet_key_repository() + ) # No Symptom Detected: configs set properly, key repo has been # populated with keys self.config_fixture.config(group='token', provider='fernet') mock_utils.FernetUtils().load_keys.return_value = True self.assertFalse( - tokens_fernet.symptom_usability_of_Fernet_key_repository()) + tokens_fernet.symptom_usability_of_Fernet_key_repository() + ) class TestMappingPurge(unit.SQLDriverOverrides, unit.BaseTestCase): @@ -1535,8 +1741,11 @@ class TestMappingPurge(unit.SQLDriverOverrides, unit.BaseTestCase): self.command_domain_name = None self.command_local_id = None self.command_public_id = None - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) self.assertRaises(ValueError, cli.MappingPurge.main) def test_mapping_purge_with_all_and_other_argument_fails(self): @@ -1546,39 +1755,42 @@ class TestMappingPurge(unit.SQLDriverOverrides, unit.BaseTestCase): self.command_domain_name = None self.command_local_id = None self.command_public_id = None - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) self.assertRaises(ValueError, cli.MappingPurge.main) def test_mapping_purge_with_only_all_passes(self): - args = (['--all']) + args = ['--all'] res = self.parser.parse_args(args) self.assertTrue(vars(res)['all']) def test_mapping_purge_with_domain_name_argument_succeeds(self): - args = (['--domain-name', uuid.uuid4().hex]) + args = ['--domain-name', uuid.uuid4().hex] self.parser.parse_args(args) def 
test_mapping_purge_with_public_id_argument_succeeds(self): - args = (['--public-id', uuid.uuid4().hex]) + args = ['--public-id', uuid.uuid4().hex] self.parser.parse_args(args) def test_mapping_purge_with_local_id_argument_succeeds(self): - args = (['--local-id', uuid.uuid4().hex]) + args = ['--local-id', uuid.uuid4().hex] self.parser.parse_args(args) def test_mapping_purge_with_type_argument_succeeds(self): - args = (['--type', 'user']) + args = ['--type', 'user'] self.parser.parse_args(args) - args = (['--type', 'group']) + args = ['--type', 'group'] self.parser.parse_args(args) def test_mapping_purge_with_invalid_argument_fails(self): - args = (['--invalid-option', 'some value']) + args = ['--invalid-option', 'some value'] self.assertRaises(unit.UnexpectedExit, self.parser.parse_args, args) def test_mapping_purge_with_all_other_combinations_passes(self): - args = (['--type', 'user', '--local-id', uuid.uuid4().hex]) + args = ['--type', 'user', '--local-id', uuid.uuid4().hex] self.parser.parse_args(args) args.append('--domain-name') args.append('test') @@ -1595,22 +1807,33 @@ class TestMappingPurge(unit.SQLDriverOverrides, unit.BaseTestCase): self.command_domain_name = None self.command_local_id = uuid.uuid4().hex self.command_public_id = uuid.uuid4().hex - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) def fake_load_backends(): return dict( id_mapping_api=keystone.identity.core.MappingManager, - resource_api=None) + resource_api=None, + ) - self.useFixture(fixtures.MockPatch( - 'keystone.server.backends.load_backends', - side_effect=fake_load_backends)) + self.useFixture( + fixtures.MockPatch( + 'keystone.server.backends.load_backends', + side_effect=fake_load_backends, + ) + ) cli.MappingPurge.main() - purge_mock.assert_called_with({'entity_type': 'user', - 'local_id': self.command_local_id, - 'public_id': 
self.command_public_id}) + purge_mock.assert_called_with( + { + 'entity_type': 'user', + 'local_id': self.command_local_id, + 'public_id': self.command_public_id, + } + ) class TestUserMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): @@ -1637,13 +1860,16 @@ class TestUserMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): def config_overrides(self): super(TestUserMappingPurgeFunctional, self).config_overrides() self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=False + ) def config(self, config_files): - CONF(args=['mapping_purge', '--type', 'user'], - project='keystone', - default_config_files=config_files) + CONF( + args=['mapping_purge', '--type', 'user'], + project='keystone', + default_config_files=config_files, + ) def test_purge_by_user_type(self): # Grab the list of the users from the backend directly to avoid @@ -1659,7 +1885,7 @@ class TestUserMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): group_ref = { 'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': CONF.identity.default_domain_id + 'domain_id': CONF.identity.default_domain_id, } PROVIDERS.identity_api.driver.create_group(group_ref['id'], group_ref) PROVIDERS.identity_api.list_groups() @@ -1670,14 +1896,17 @@ class TestUserMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): local_entity = { 'domain_id': CONF.identity.default_domain_id, 'local_id': user['id'], - 'entity_type': identity_mapping.EntityType.USER} + 'entity_type': identity_mapping.EntityType.USER, + } self.assertIsNotNone( - PROVIDERS.id_mapping_api.get_public_id(local_entity)) + PROVIDERS.id_mapping_api.get_public_id(local_entity) + ) group_entity = { 'domain_id': CONF.identity.default_domain_id, 'local_id': group_ref['id'], - 'entity_type': identity_mapping.EntityType.GROUP} + 'entity_type': 
identity_mapping.EntityType.GROUP, + } self.assertIsNotNone( PROVIDERS.id_mapping_api.get_public_id(group_entity) ) @@ -1691,7 +1920,8 @@ class TestUserMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): local_entity = { 'domain_id': CONF.identity.default_domain_id, 'local_id': user['id'], - 'entity_type': identity_mapping.EntityType.USER} + 'entity_type': identity_mapping.EntityType.USER, + } self.assertIsNone( PROVIDERS.id_mapping_api.get_public_id(local_entity) ) @@ -1726,13 +1956,16 @@ class TestGroupMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): def config_overrides(self): super(TestGroupMappingPurgeFunctional, self).config_overrides() self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) + self.config_fixture.config( + group='identity_mapping', backward_compatible_ids=False + ) def config(self, config_files): - CONF(args=['mapping_purge', '--type', 'group'], - project='keystone', - default_config_files=config_files) + CONF( + args=['mapping_purge', '--type', 'group'], + project='keystone', + default_config_files=config_files, + ) def test_purge_by_group_type(self): # Grab the list of the users from the backend directly to avoid @@ -1748,7 +1981,7 @@ class TestGroupMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): group_ref = { 'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': CONF.identity.default_domain_id + 'domain_id': CONF.identity.default_domain_id, } PROVIDERS.identity_api.driver.create_group(group_ref['id'], group_ref) PROVIDERS.identity_api.list_groups() @@ -1759,14 +1992,17 @@ class TestGroupMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): local_entity = { 'domain_id': CONF.identity.default_domain_id, 'local_id': user['id'], - 'entity_type': identity_mapping.EntityType.USER} + 'entity_type': identity_mapping.EntityType.USER, + } self.assertIsNotNone( - 
PROVIDERS.id_mapping_api.get_public_id(local_entity)) + PROVIDERS.id_mapping_api.get_public_id(local_entity) + ) group_entity = { 'domain_id': CONF.identity.default_domain_id, 'local_id': group_ref['id'], - 'entity_type': identity_mapping.EntityType.GROUP} + 'entity_type': identity_mapping.EntityType.GROUP, + } self.assertIsNotNone( PROVIDERS.id_mapping_api.get_public_id(group_entity) ) @@ -1776,16 +2012,15 @@ class TestGroupMappingPurgeFunctional(unit.SQLDriverOverrides, unit.TestCase): cli.MappingPurge.main() # Make sure the group mapping was purged - self.assertIsNone( - PROVIDERS.id_mapping_api.get_public_id(group_entity) - ) + self.assertIsNone(PROVIDERS.id_mapping_api.get_public_id(group_entity)) # Check that all the user mappings still exist for user in users: local_entity = { 'domain_id': CONF.identity.default_domain_id, 'local_id': user['id'], - 'entity_type': identity_mapping.EntityType.USER} + 'entity_type': identity_mapping.EntityType.USER, + } self.assertIsNotNone( PROVIDERS.id_mapping_api.get_public_id(local_entity) ) @@ -1823,16 +2058,21 @@ class TestTrustFlush(unit.SQLDriverOverrides, unit.BaseTestCase): self.command_trustor_user_id = None self.command_trustee_user_id = None self.command_date = datetime.datetime.utcnow() - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) def fake_load_backends(): - return dict( - trust_api=keystone.trust.core.Manager()) + return dict(trust_api=keystone.trust.core.Manager()) - self.useFixture(fixtures.MockPatch( - 'keystone.server.backends.load_backends', - side_effect=fake_load_backends)) + self.useFixture( + fixtures.MockPatch( + 'keystone.server.backends.load_backends', + side_effect=fake_load_backends, + ) + ) trust = cli.TrustFlush() trust.main() @@ -1841,16 +2081,21 @@ class TestTrustFlush(unit.SQLDriverOverrides, unit.BaseTestCase): self.command_trustor_user_id = None 
self.command_trustee_user_id = None self.command_date = '4/10/92' - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) def fake_load_backends(): - return dict( - trust_api=keystone.trust.core.Manager()) + return dict(trust_api=keystone.trust.core.Manager()) - self.useFixture(fixtures.MockPatch( - 'keystone.server.backends.load_backends', - side_effect=fake_load_backends)) + self.useFixture( + fixtures.MockPatch( + 'keystone.server.backends.load_backends', + side_effect=fake_load_backends, + ) + ) # Clear backend dependencies, since cli loads these manually provider_api.ProviderAPIs._clear_registry_instances() trust = cli.TrustFlush() @@ -1904,8 +2149,11 @@ class TestMappingEngineTester(unit.BaseTestCase): self.command_input = tmpinvalidfile self.command_prefix = None self.command_engine_debug = True - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) mapping_engine = cli.MappingEngineTester() self.assertRaises(SystemExit, mapping_engine.main) @@ -1920,8 +2168,11 @@ class TestMappingEngineTester(unit.BaseTestCase): self.command_input = "invalid.csv" self.command_prefix = None self.command_engine_debug = True - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) mapping_engine = cli.MappingEngineTester() self.assertRaises(SystemExit, mapping_engine.main) @@ -1943,8 +2194,11 @@ class TestMappingEngineTester(unit.BaseTestCase): self.command_input = tmpfilename self.command_prefix = None self.command_engine_debug = True - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + 
CONF, 'command', self.FakeConfCommand(self) + ) + ) mapping_engine = cli.MappingEngineTester() with mock.patch('builtins.print') as mock_print: mapping_engine.main() @@ -1959,12 +2213,9 @@ class TestMappingEngineTester(unit.BaseTestCase): args, kwargs = call expected = { "group_names": [], - "user": { - "type": "ephemeral", - "name": "me" - }, + "user": {"type": "ephemeral", "name": "me"}, "projects": [], - "group_ids": ["0cd5e9"] + "group_ids": ["0cd5e9"], } self.assertEqual(jsonutils.loads(args[0]), expected) @@ -1986,11 +2237,13 @@ class TestMappingEngineTester(unit.BaseTestCase): self.command_input = tmpfilename self.command_prefix = None self.command_engine_debug = True - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) mapping_engine = cli.MappingEngineTester() - self.assertRaises(exception.ValidationError, - mapping_engine.main) + self.assertRaises(exception.ValidationError, mapping_engine.main) def test_mapping_engine_tester_logs_direct_maps(self): tempfilejson = self.useFixture(temporaryfile.SecureTempFile()) @@ -2010,8 +2263,11 @@ class TestMappingEngineTester(unit.BaseTestCase): self.command_input = tmpfilename self.command_prefix = None self.command_engine_debug = True - self.useFixture(fixtures.MockPatchObject( - CONF, 'command', self.FakeConfCommand(self))) + self.useFixture( + fixtures.MockPatchObject( + CONF, 'command', self.FakeConfCommand(self) + ) + ) mapping_engine = cli.MappingEngineTester() logging = self.useFixture(fixtures.FakeLogger(level=log.DEBUG)) mapping_engine.main() @@ -2041,7 +2297,7 @@ class CliStatusTestCase(unit.SQLDriverOverrides, unit.TestCase): 'identity:delete_trust': '', 'identity:get_trust': '', 'identity:list_roles_for_trust': '', - 'identity:get_role_for_trust': '' + 'identity:get_role_for_trust': '', } f.write(jsonutils.dumps(overridden_policies)) result = 
self.checks.check_trust_policies_are_not_empty() @@ -2052,7 +2308,7 @@ class CliStatusTestCase(unit.SQLDriverOverrides, unit.TestCase): 'identity:delete_trust': 'rule:admin_required', 'identity:get_trust': 'rule:admin_required', 'identity:list_roles_for_trust': 'rule:admin_required', - 'identity:get_role_for_trust': 'rule:admin_required' + 'identity:get_role_for_trust': 'rule:admin_required', } f.write(jsonutils.dumps(overridden_policies)) result = self.checks.check_trust_policies_are_not_empty() @@ -2074,11 +2330,12 @@ class CliStatusTestCase(unit.SQLDriverOverrides, unit.TestCase): self.assertEqual(upgradecheck.Code.SUCCESS, result.code) # Check domain-specific roles are not reported PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], - default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) domain_ref = unit.new_domain_ref() domain = PROVIDERS.resource_api.create_domain( - domain_ref['id'], domain_ref) + domain_ref['id'], domain_ref + ) role_ref = unit.new_role_ref(name='admin', domain_id=domain['id']) PROVIDERS.role_api.create_role(role_ref['id'], role_ref) result = self.checks.check_default_roles_are_immutable() diff --git a/keystone/tests/unit/test_config.py b/keystone/tests/unit/test_config.py index c524288ff2..bcf6b07e05 100644 --- a/keystone/tests/unit/test_config.py +++ b/keystone/tests/unit/test_config.py @@ -29,8 +29,12 @@ class ConfigTestCase(unit.TestCase): config_files = super(ConfigTestCase, self).config_files() sample_file = 'keystone.conf.sample' - args = ['--namespace', 'keystone', '--output-file', - unit.dirs.etc(sample_file)] + args = [ + '--namespace', + 'keystone', + '--output-file', + unit.dirs.etc(sample_file), + ] generator.main(args=args) config_files.insert(0, unit.dirs.etc(sample_file)) diff --git a/keystone/tests/unit/test_contrib_ec2_core.py b/keystone/tests/unit/test_contrib_ec2_core.py index a952428d24..eaddb73fc3 100644 --- 
a/keystone/tests/unit/test_contrib_ec2_core.py +++ b/keystone/tests/unit/test_contrib_ec2_core.py @@ -33,16 +33,19 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): super(EC2ContribCoreV3, self).setUp() self.cred_blob, self.credential = unit.new_ec2_credential( - self.user['id'], self.project_id) + self.user['id'], self.project_id + ) PROVIDERS.credential_api.create_credential( - self.credential['id'], self.credential) + self.credential['id'], self.credential + ) def test_http_get_method_not_allowed(self): - resp = self.get('/ec2tokens', - expected_status=http.client.METHOD_NOT_ALLOWED, - convert=False) - self.assertEqual(http.client.METHOD_NOT_ALLOWED, - resp.status_code) + resp = self.get( + '/ec2tokens', + expected_status=http.client.METHOD_NOT_ALLOWED, + convert=False, + ) + self.assertEqual(http.client.METHOD_NOT_ALLOWED, resp.status_code) def _test_valid_authentication_response_with_proper_secret(self, **kwargs): signer = ec2_utils.Ec2Signer(self.cred_blob['secret']) @@ -56,7 +59,7 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): 'params': { 'SignatureVersion': '2', 'Action': 'Test', - 'Timestamp': timestamp + 'Timestamp': timestamp, }, } credentials['signature'] = signer.generate(credentials) @@ -64,7 +67,8 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): '/ec2tokens', body={'credentials': credentials}, expected_status=http.client.OK, - **kwargs) + **kwargs + ) self.assertValidProjectScopedTokenResponse(resp, self.user) def test_valid_authentication_response_with_proper_secret(self): @@ -72,7 +76,8 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): def test_valid_authentication_response_with_proper_secret_noauth(self): self._test_valid_authentication_response_with_proper_secret( - noauth=True) + noauth=True + ) def test_valid_authentication_response_with_signature_v4(self): signer = ec2_utils.Ec2Signer(self.cred_blob['secret']) @@ -89,8 +94,9 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): ) body_hash = 
hashlib.sha256(hashed_payload.encode()).hexdigest() amz_credential = ( - 'AKIAIOSFODNN7EXAMPLE/%s/us-east-1/iam/aws4_request,' % - timestamp[:8]) + 'AKIAIOSFODNN7EXAMPLE/%s/us-east-1/iam/aws4_request,' + % timestamp[:8] + ) credentials = { 'access': self.cred_blob['access'], @@ -102,36 +108,33 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): 'Action': 'Test', 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256', 'X-Amz-SignedHeaders': 'host,x-amz-date,', - 'X-Amz-Credential': amz_credential + 'X-Amz-Credential': amz_credential, }, - 'headers': { - 'X-Amz-Date': timestamp - }, - 'body_hash': body_hash + 'headers': {'X-Amz-Date': timestamp}, + 'body_hash': body_hash, } credentials['signature'] = signer.generate(credentials) resp = self.post( '/ec2tokens', body={'credentials': credentials}, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) self.assertValidProjectScopedTokenResponse(resp, self.user) def test_authenticate_with_empty_body_returns_bad_request(self): self.post( - '/ec2tokens', - body={}, - expected_status=http.client.BAD_REQUEST) + '/ec2tokens', body={}, expected_status=http.client.BAD_REQUEST + ) def test_authenticate_without_json_request_returns_bad_request(self): self.post( '/ec2tokens', body='not json', - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_authenticate_without_request_body_returns_bad_request(self): - self.post( - '/ec2tokens', - expected_status=http.client.BAD_REQUEST) + self.post('/ec2tokens', expected_status=http.client.BAD_REQUEST) def test_authenticate_without_proper_secret_returns_unauthorized(self): signer = ec2_utils.Ec2Signer('totally not the secret') @@ -145,20 +148,18 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): 'params': { 'SignatureVersion': '2', 'Action': 'Test', - 'Timestamp': timestamp + 'Timestamp': timestamp, }, } credentials['signature'] = signer.generate(credentials) self.post( '/ec2tokens', body={'credentials': credentials}, - 
expected_status=http.client.UNAUTHORIZED) + expected_status=http.client.UNAUTHORIZED, + ) def test_authenticate_expired_request(self): - self.config_fixture.config( - group='credential', - auth_ttl=5 - ) + self.config_fixture.config(group='credential', auth_ttl=5) signer = ec2_utils.Ec2Signer(self.cred_blob['secret']) past = timeutils.utcnow() - datetime.timedelta(minutes=10) timestamp = utils.isotime(past) @@ -171,20 +172,18 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): 'params': { 'SignatureVersion': '2', 'Action': 'Test', - 'Timestamp': timestamp + 'Timestamp': timestamp, }, } credentials['signature'] = signer.generate(credentials) self.post( '/ec2tokens', body={'credentials': credentials}, - expected_status=http.client.UNAUTHORIZED) + expected_status=http.client.UNAUTHORIZED, + ) def test_authenticate_expired_request_v4(self): - self.config_fixture.config( - group='credential', - auth_ttl=5 - ) + self.config_fixture.config(group='credential', auth_ttl=5) signer = ec2_utils.Ec2Signer(self.cred_blob['secret']) past = timeutils.utcnow() - datetime.timedelta(minutes=10) timestamp = utils.isotime(past) @@ -200,8 +199,9 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): ) body_hash = hashlib.sha256(hashed_payload.encode()).hexdigest() amz_credential = ( - 'AKIAIOSFODNN7EXAMPLE/%s/us-east-1/iam/aws4_request,' % - timestamp[:8]) + 'AKIAIOSFODNN7EXAMPLE/%s/us-east-1/iam/aws4_request,' + % timestamp[:8] + ) credentials = { 'access': self.cred_blob['access'], @@ -213,15 +213,14 @@ class EC2ContribCoreV3(test_v3.RestfulTestCase): 'Action': 'Test', 'X-Amz-Algorithm': 'AWS4-HMAC-SHA256', 'X-Amz-SignedHeaders': 'host,x-amz-date,', - 'X-Amz-Credential': amz_credential + 'X-Amz-Credential': amz_credential, }, - 'headers': { - 'X-Amz-Date': timestamp - }, - 'body_hash': body_hash + 'headers': {'X-Amz-Date': timestamp}, + 'body_hash': body_hash, } credentials['signature'] = signer.generate(credentials) self.post( '/ec2tokens', body={'credentials': credentials}, - 
expected_status=http.client.UNAUTHORIZED) + expected_status=http.client.UNAUTHORIZED, + ) diff --git a/keystone/tests/unit/test_contrib_s3_core.py b/keystone/tests/unit/test_contrib_s3_core.py index 75f85d7b08..a3d9178879 100644 --- a/keystone/tests/unit/test_contrib_s3_core.py +++ b/keystone/tests/unit/test_contrib_s3_core.py @@ -35,32 +35,42 @@ class S3ContribCore(test_v3.RestfulTestCase): self.load_backends() self.cred_blob, self.credential = unit.new_ec2_credential( - self.user['id'], self.project_id) + self.user['id'], self.project_id + ) PROVIDERS.credential_api.create_credential( - self.credential['id'], self.credential) + self.credential['id'], self.credential + ) def test_http_get_method_not_allowed(self): - resp = self.get('/s3tokens', - expected_status=http.client.METHOD_NOT_ALLOWED, - convert=False) - self.assertEqual(http.client.METHOD_NOT_ALLOWED, - resp.status_code) + resp = self.get( + '/s3tokens', + expected_status=http.client.METHOD_NOT_ALLOWED, + convert=False, + ) + self.assertEqual(http.client.METHOD_NOT_ALLOWED, resp.status_code) def _test_good_response(self, **kwargs): sts = 'string to sign' # opaque string from swift3 - sig = hmac.new(self.cred_blob['secret'].encode('ascii'), - sts.encode('ascii'), hashlib.sha1).digest() + sig = hmac.new( + self.cred_blob['secret'].encode('ascii'), + sts.encode('ascii'), + hashlib.sha1, + ).digest() resp = self.post( '/s3tokens', - body={'credentials': { - 'access': self.cred_blob['access'], - 'signature': base64.b64encode(sig).strip(), - 'token': base64.b64encode(sts.encode('ascii')).strip(), - }}, + body={ + 'credentials': { + 'access': self.cred_blob['access'], + 'signature': base64.b64encode(sig).strip(), + 'token': base64.b64encode(sts.encode('ascii')).strip(), + } + }, expected_status=http.client.OK, - **kwargs) - self.assertValidProjectScopedTokenResponse(resp, self.user, - forbid_token_id=True) + **kwargs + ) + self.assertValidProjectScopedTokenResponse( + resp, self.user, forbid_token_id=True + ) 
def test_good_response(self): self._test_good_response() @@ -70,182 +80,202 @@ class S3ContribCore(test_v3.RestfulTestCase): def test_bad_request(self): self.post( - '/s3tokens', - body={}, - expected_status=http.client.BAD_REQUEST) + '/s3tokens', body={}, expected_status=http.client.BAD_REQUEST + ) self.post( '/s3tokens', body="not json", - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) - self.post( - '/s3tokens', - expected_status=http.client.BAD_REQUEST) + self.post('/s3tokens', expected_status=http.client.BAD_REQUEST) def test_bad_response(self): self.post( '/s3tokens', - body={'credentials': { - 'access': self.cred_blob['access'], - 'signature': base64.b64encode(b'totally not the sig').strip(), - 'token': base64.b64encode(b'string to sign').strip(), - }}, - expected_status=http.client.UNAUTHORIZED) + body={ + 'credentials': { + 'access': self.cred_blob['access'], + 'signature': base64.b64encode( + b'totally not the sig' + ).strip(), + 'token': base64.b64encode(b'string to sign').strip(), + } + }, + expected_status=http.client.UNAUTHORIZED, + ) def test_good_signature_v1(self): - creds_ref = {'secret': - u'b121dd41cdcc42fe9f70e572e84295aa'} - credentials = {'token': - 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB' - 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM' - 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ' - 'vbV9zMy50eHQ=', - 'signature': 'IL4QLcLVaYgylF9iHj6Wb8BGZsw='} + creds_ref = {'secret': u'b121dd41cdcc42fe9f70e572e84295aa'} + credentials = { + 'token': 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB' + 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM' + 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ' + 'vbV9zMy50eHQ=', + 'signature': 'IL4QLcLVaYgylF9iHj6Wb8BGZsw=', + } - self.assertIsNone(s3tokens.S3Resource._check_signature( - creds_ref, credentials)) + self.assertIsNone( + s3tokens.S3Resource._check_signature(creds_ref, credentials) + ) def test_bad_signature_v1(self): - creds_ref = {'secret': - 
u'b121dd41cdcc42fe9f70e572e84295aa'} - credentials = {'token': - 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB' - 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM' - 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ' - 'vbV9zMy50eHQ=', - 'signature': uuid.uuid4().hex} + creds_ref = {'secret': u'b121dd41cdcc42fe9f70e572e84295aa'} + credentials = { + 'token': 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB' + 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM' + 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ' + 'vbV9zMy50eHQ=', + 'signature': uuid.uuid4().hex, + } - self.assertRaises(exception.Unauthorized, - s3tokens.S3Resource._check_signature, - creds_ref, credentials) + self.assertRaises( + exception.Unauthorized, + s3tokens.S3Resource._check_signature, + creds_ref, + credentials, + ) def test_good_signature_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy' - 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1' - 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==', - 'signature': - '730ba8f58df6ffeadd78f402e990b2910d60' - 'bc5c2aec63619734f096a4dd77be'} + creds_ref = {'secret': u'e7a7a2240136494986991a6598d9fb9f'} + credentials = { + 'token': 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' + 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy' + 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1' + 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==', + 'signature': '730ba8f58df6ffeadd78f402e990b2910d60' + 'bc5c2aec63619734f096a4dd77be', + } - self.assertIsNone(s3tokens.S3Resource._check_signature( - creds_ref, credentials)) + self.assertIsNone( + s3tokens.S3Resource._check_signature(creds_ref, credentials) + ) def test_good_iam_signature_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 
'MTUwODI0L1JlZ2lvbk9uZS9pYW0vYXdzNF9yZXF1ZXN0CmYy' - 'MjE1NTgwZWViOWExNjczNTFiZDkzZTg2YzNiNmYwNGE5Mjhm' - 'NWM1NTIwYTM5MzVhNDUzNTQwYTA5NTY0YjU=', - 'signature': - 'db4e15b3040f6afaa9d9d16002de2fc3425b' - 'eea0c6ea8c1b2bb674f052030b7d'} + creds_ref = {'secret': u'e7a7a2240136494986991a6598d9fb9f'} + credentials = { + 'token': 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' + 'MTUwODI0L1JlZ2lvbk9uZS9pYW0vYXdzNF9yZXF1ZXN0CmYy' + 'MjE1NTgwZWViOWExNjczNTFiZDkzZTg2YzNiNmYwNGE5Mjhm' + 'NWM1NTIwYTM5MzVhNDUzNTQwYTA5NTY0YjU=', + 'signature': 'db4e15b3040f6afaa9d9d16002de2fc3425b' + 'eea0c6ea8c1b2bb674f052030b7d', + } - self.assertIsNone(s3tokens.S3Resource._check_signature( - creds_ref, credentials)) + self.assertIsNone( + s3tokens.S3Resource._check_signature(creds_ref, credentials) + ) def test_good_sts_signature_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 'MTUwODI0L1JlZ2lvbk9uZS9zdHMvYXdzNF9yZXF1ZXN0CmYy' - 'MjE1NTgwZWViOWExNjczNTFiZDkzZTg2YzNiNmYwNGE5Mjhm' - 'NWM1NTIwYTM5MzVhNDUzNTQwYTA5NTY0YjU=', - 'signature': - '3aa0b6f1414b92b2a32584068f83c6d09b7f' - 'daa11d4ea58912bbf1d8616ef56d'} + creds_ref = {'secret': u'e7a7a2240136494986991a6598d9fb9f'} + credentials = { + 'token': 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' + 'MTUwODI0L1JlZ2lvbk9uZS9zdHMvYXdzNF9yZXF1ZXN0CmYy' + 'MjE1NTgwZWViOWExNjczNTFiZDkzZTg2YzNiNmYwNGE5Mjhm' + 'NWM1NTIwYTM5MzVhNDUzNTQwYTA5NTY0YjU=', + 'signature': '3aa0b6f1414b92b2a32584068f83c6d09b7f' + 'daa11d4ea58912bbf1d8616ef56d', + } - self.assertIsNone(s3tokens.S3Resource._check_signature( - creds_ref, credentials)) + self.assertIsNone( + s3tokens.S3Resource._check_signature(creds_ref, credentials) + ) def test_bad_signature_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} + creds_ref = {'secret': u'e7a7a2240136494986991a6598d9fb9f'} # the signature is wrong on an otherwise correctly formed token - 
credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy' - 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1' - 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==', - 'signature': uuid.uuid4().hex} + credentials = { + 'token': 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' + 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy' + 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1' + 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==', + 'signature': uuid.uuid4().hex, + } - self.assertRaises(exception.Unauthorized, - s3tokens.S3Resource._check_signature, - creds_ref, credentials) + self.assertRaises( + exception.Unauthorized, + s3tokens.S3Resource._check_signature, + creds_ref, + credentials, + ) def test_bad_service_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} + creds_ref = {'secret': u'e7a7a2240136494986991a6598d9fb9f'} # use 'bad' as the service scope instead of a recognised service - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 'MTUwODI0L1JlZ2lvbk9uZS9iYWQvYXdzNF9yZXF1ZXN0CmYy' - 'MjE1NTgwZWViOWExNjczNTFiZDkzZTg2YzNiNmYwNGE5Mjhm' - 'NWM1NTIwYTM5MzVhNDUzNTQwYTA5NTY0YjU=', - 'signature': - '1a2dec50eb1bba97887d1103c2ead6a39911' - '98c4be2537cf14d40b64cceb888b'} + credentials = { + 'token': 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' + 'MTUwODI0L1JlZ2lvbk9uZS9iYWQvYXdzNF9yZXF1ZXN0CmYy' + 'MjE1NTgwZWViOWExNjczNTFiZDkzZTg2YzNiNmYwNGE5Mjhm' + 'NWM1NTIwYTM5MzVhNDUzNTQwYTA5NTY0YjU=', + 'signature': '1a2dec50eb1bba97887d1103c2ead6a39911' + '98c4be2537cf14d40b64cceb888b', + } - self.assertRaises(exception.Unauthorized, - s3tokens.S3Resource._check_signature, - creds_ref, credentials) + self.assertRaises( + exception.Unauthorized, + s3tokens.S3Resource._check_signature, + creds_ref, + credentials, + ) def test_bad_signing_key_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} + creds_ref = {'secret': 
u'e7a7a2240136494986991a6598d9fb9f'} # signed with aws4_badrequest instead of aws4_request - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy' - 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1' - 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==', - 'signature': - '52d02211a3767d00b2104ab28c9859003b0e' - '9c8735cd10de7975f3b1212cca41'} + credentials = { + 'token': 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' + 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy' + 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1' + 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==', + 'signature': '52d02211a3767d00b2104ab28c9859003b0e' + '9c8735cd10de7975f3b1212cca41', + } - self.assertRaises(exception.Unauthorized, - s3tokens.S3Resource._check_signature, - creds_ref, credentials) + self.assertRaises( + exception.Unauthorized, + s3tokens.S3Resource._check_signature, + creds_ref, + credentials, + ) def test_bad_short_scope_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} + creds_ref = {'secret': u'e7a7a2240136494986991a6598d9fb9f'} # credential scope has too few parts, missing final /aws4_request - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 'MTUwODI0L1JlZ2lvbk9uZS9zMwpmMjIxNTU4MGVlYjlhMTY3' - 'MzUxYmQ5M2U4NmMzYjZmMDRhOTI4ZjVjNTUyMGEzOTM1YTQ1' - 'MzU0MGEwOTU2NGI1', - 'signature': - '28a075f1ee41e96c431153914998443ff0f5' - '5fe93d31b37181f13ff4865942a2'} + credentials = { + 'token': 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' + 'MTUwODI0L1JlZ2lvbk9uZS9zMwpmMjIxNTU4MGVlYjlhMTY3' + 'MzUxYmQ5M2U4NmMzYjZmMDRhOTI4ZjVjNTUyMGEzOTM1YTQ1' + 'MzU0MGEwOTU2NGI1', + 'signature': '28a075f1ee41e96c431153914998443ff0f5' + '5fe93d31b37181f13ff4865942a2', + } - self.assertRaises(exception.Unauthorized, - s3tokens.S3Resource._check_signature, - creds_ref, credentials) + self.assertRaises( + exception.Unauthorized, + s3tokens.S3Resource._check_signature, + creds_ref, + 
credentials, + ) def test_bad_token_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} + creds_ref = {'secret': u'e7a7a2240136494986991a6598d9fb9f'} # token has invalid format of first part - credentials = {'token': - 'QVdTNC1BQUEKWApYClg=', - 'signature': ''} - self.assertRaises(exception.Unauthorized, - s3tokens.S3Resource._check_signature, - creds_ref, credentials) + credentials = {'token': 'QVdTNC1BQUEKWApYClg=', 'signature': ''} + self.assertRaises( + exception.Unauthorized, + s3tokens.S3Resource._check_signature, + creds_ref, + credentials, + ) # token has invalid format of scope - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgpYCi8vczMvYXdzTl9yZXF1ZXN0Clg=', - 'signature': ''} - self.assertRaises(exception.Unauthorized, - s3tokens.S3Resource._check_signature, - creds_ref, credentials) + credentials = { + 'token': 'QVdTNC1ITUFDLVNIQTI1NgpYCi8vczMvYXdzTl9yZXF1ZXN0Clg=', + 'signature': '', + } + self.assertRaises( + exception.Unauthorized, + s3tokens.S3Resource._check_signature, + creds_ref, + credentials, + ) diff --git a/keystone/tests/unit/test_contrib_simple_cert.py b/keystone/tests/unit/test_contrib_simple_cert.py index 6f13bc825f..c4d0630e1f 100644 --- a/keystone/tests/unit/test_contrib_simple_cert.py +++ b/keystone/tests/unit/test_contrib_simple_cert.py @@ -24,10 +24,12 @@ class BaseTestCase(test_v3.RestfulTestCase): class TestSimpleCert(BaseTestCase): def request_cert(self, path): - self.request(app=self.public_app, - method='GET', - path=path, - expected_status=http.client.GONE) + self.request( + app=self.public_app, + method='GET', + path=path, + expected_status=http.client.GONE, + ) def test_ca_cert(self): self.request_cert(self.CA_PATH) diff --git a/keystone/tests/unit/test_exception.py b/keystone/tests/unit/test_exception.py index e36f3c8fe1..c1ef5c30ca 100644 --- a/keystone/tests/unit/test_exception.py +++ b/keystone/tests/unit/test_exception.py @@ -91,16 +91,16 @@ class ExceptionTestCase(unit.BaseTestCase): 
self.fail("unicode error message not supported") def test_unicode_string(self): - e = exception.ValidationError(attribute='xx', - target='Long \xe2\x80\x93 Dash') + e = exception.ValidationError( + attribute='xx', target='Long \xe2\x80\x93 Dash' + ) self.assertIn('Long \xe2\x80\x93 Dash', str(e)) def test_invalid_unicode_string(self): # NOTE(jamielennox): This is a complete failure case so what is # returned in the exception message is not that important so long # as there is an error with a message - e = exception.ValidationError(attribute='xx', - target='\xe7a va') + e = exception.ValidationError(attribute='xx', target='\xe7a va') self.assertIn('\xe7a va', str(e)) @@ -127,10 +127,8 @@ class UnexpectedExceptionTestCase(ExceptionTestCase): def test_unexpected_error_subclass_no_debug(self): self.config_fixture.config(debug=False) - e = UnexpectedExceptionTestCase.SubClassExc( - debug_info=self.exc_str) - self.assertEqual(exception.UnexpectedError.message_format, - str(e)) + e = UnexpectedExceptionTestCase.SubClassExc(debug_info=self.exc_str) + self.assertEqual(exception.UnexpectedError.message_format, str(e)) def test_unexpected_error_subclass_debug(self): self.config_fixture.config(debug=True, insecure_debug=True) @@ -139,39 +137,37 @@ class UnexpectedExceptionTestCase(ExceptionTestCase): e = subclass(debug_info=self.exc_str) expected = subclass.debug_message_format % {'debug_info': self.exc_str} self.assertEqual( - '%s %s' % (expected, exception.SecurityError.amendment), - str(e)) + '%s %s' % (expected, exception.SecurityError.amendment), str(e) + ) def test_unexpected_error_custom_message_no_debug(self): self.config_fixture.config(debug=False) e = exception.UnexpectedError(self.exc_str) - self.assertEqual(exception.UnexpectedError.message_format, - str(e)) + self.assertEqual(exception.UnexpectedError.message_format, str(e)) def test_unexpected_error_custom_message_debug(self): self.config_fixture.config(debug=True, insecure_debug=True) e = 
exception.UnexpectedError(self.exc_str) self.assertEqual( - '%s %s' % (self.exc_str, exception.SecurityError.amendment), - str(e)) + '%s %s' % (self.exc_str, exception.SecurityError.amendment), str(e) + ) def test_unexpected_error_custom_message_exception_debug(self): self.config_fixture.config(debug=True, insecure_debug=True) orig_e = exception.NotFound(target=uuid.uuid4().hex) e = exception.UnexpectedError(orig_e) self.assertEqual( - '%s %s' % (str(orig_e), - exception.SecurityError.amendment), - str(e)) + '%s %s' % (str(orig_e), exception.SecurityError.amendment), str(e) + ) def test_unexpected_error_custom_message_binary_debug(self): self.config_fixture.config(debug=True, insecure_debug=True) binary_msg = b'something' e = exception.UnexpectedError(binary_msg) self.assertEqual( - '%s %s' % (str(binary_msg), - exception.SecurityError.amendment), - str(e)) + '%s %s' % (str(binary_msg), exception.SecurityError.amendment), + str(e), + ) class SecurityErrorTestCase(ExceptionTestCase): @@ -283,7 +279,8 @@ class TestSecurityErrorTranslation(unit.BaseTestCase): exception._FATAL_EXCEPTION_FORMAT_ERRORS = False self.addCleanup( - setattr, exception, '_FATAL_EXCEPTION_FORMAT_ERRORS', True) + setattr, exception, '_FATAL_EXCEPTION_FORMAT_ERRORS', True + ) class CustomSecurityError(exception.SecurityError): message_format = 'We had a failure in the %(place)r' diff --git a/keystone/tests/unit/test_hacking_checks.py b/keystone/tests/unit/test_hacking_checks.py index 9afe4d4e34..3220fb01af 100644 --- a/keystone/tests/unit/test_hacking_checks.py +++ b/keystone/tests/unit/test_hacking_checks.py @@ -28,8 +28,9 @@ class BaseStyleCheck(unit.BaseTestCase): def get_checker(self): """Return the checker to be used for tests in this class.""" - raise NotImplementedError('subclasses must provide ' - 'a real implementation') + raise NotImplementedError( + 'subclasses must provide ' 'a real implementation' + ) def get_fixture(self): return hacking_fixtures.HackingCode() @@ -87,8 +88,9 @@ 
class TestTranslationChecks(BaseStyleCheck): actual_errors = (e[:3] for e in self.run_check(code)) # adjust line numbers to make the fixture data more readable. import_lines = len(self.code_ex.shared_imports.split('\n')) - 1 - actual_errors = [(e[0] - import_lines, e[1], e[2]) - for e in actual_errors] + actual_errors = [ + (e[0] - import_lines, e[1], e[2]) for e in actual_errors + ] self.assertEqual(expected_errors or [], actual_errors) def test_for_translations(self): diff --git a/keystone/tests/unit/test_ldap_livetest.py b/keystone/tests/unit/test_ldap_livetest.py index 3f9fb53143..e0884dbcb1 100644 --- a/keystone/tests/unit/test_ldap_livetest.py +++ b/keystone/tests/unit/test_ldap_livetest.py @@ -47,28 +47,42 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity): def clear_database(self): devnull = open('/dev/null', 'w') - subprocess.call(['ldapdelete', - '-x', - '-D', CONF.ldap.user, - '-H', CONF.ldap.url, - '-w', CONF.ldap.password, - '-r', CONF.ldap.suffix], - stderr=devnull) + subprocess.call( + [ + 'ldapdelete', + '-x', + '-D', + CONF.ldap.user, + '-H', + CONF.ldap.url, + '-w', + CONF.ldap.password, + '-r', + CONF.ldap.suffix, + ], + stderr=devnull, + ) if CONF.ldap.suffix.startswith('ou='): - tree_dn_attrs = {'objectclass': 'organizationalUnit', - 'ou': 'openstack'} + tree_dn_attrs = { + 'objectclass': 'organizationalUnit', + 'ou': 'openstack', + } else: - tree_dn_attrs = {'objectclass': ['dcObject', 'organizationalUnit'], - 'dc': 'openstack', - 'ou': 'openstack'} + tree_dn_attrs = { + 'objectclass': ['dcObject', 'organizationalUnit'], + 'dc': 'openstack', + 'ou': 'openstack', + } create_object(CONF.ldap.suffix, tree_dn_attrs) - create_object(CONF.ldap.user_tree_dn, - {'objectclass': 'organizationalUnit', - 'ou': 'Users'}) - create_object(CONF.ldap.group_tree_dn, - {'objectclass': 'organizationalUnit', - 'ou': 'UserGroups'}) + create_object( + CONF.ldap.user_tree_dn, + {'objectclass': 'organizationalUnit', 'ou': 'Users'}, + ) + create_object( + 
CONF.ldap.group_tree_dn, + {'objectclass': 'organizationalUnit', 'ou': 'UserGroups'}, + ) def config_files(self): config_files = super(LiveLDAPIdentity, self).config_files() @@ -83,31 +97,42 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity): self.assertEqual(CONF.ldap.user_tree_dn, user_api.tree_dn) def test_ldap_dereferencing(self): - alt_users_ldif = {'objectclass': ['top', 'organizationalUnit'], - 'ou': 'alt_users'} - alt_fake_user_ldif = {'objectclass': ['person', 'inetOrgPerson'], - 'cn': 'alt_fake1', - 'sn': 'alt_fake1'} - aliased_users_ldif = {'objectclass': ['alias', 'extensibleObject'], - 'aliasedobjectname': "ou=alt_users,%s" % - CONF.ldap.suffix} + alt_users_ldif = { + 'objectclass': ['top', 'organizationalUnit'], + 'ou': 'alt_users', + } + alt_fake_user_ldif = { + 'objectclass': ['person', 'inetOrgPerson'], + 'cn': 'alt_fake1', + 'sn': 'alt_fake1', + } + aliased_users_ldif = { + 'objectclass': ['alias', 'extensibleObject'], + 'aliasedobjectname': "ou=alt_users,%s" % CONF.ldap.suffix, + } create_object("ou=alt_users,%s" % CONF.ldap.suffix, alt_users_ldif) - create_object("%s=alt_fake1,ou=alt_users,%s" % - (CONF.ldap.user_id_attribute, CONF.ldap.suffix), - alt_fake_user_ldif) - create_object("ou=alt_users,%s" % CONF.ldap.user_tree_dn, - aliased_users_ldif) + create_object( + "%s=alt_fake1,ou=alt_users,%s" + % (CONF.ldap.user_id_attribute, CONF.ldap.suffix), + alt_fake_user_ldif, + ) + create_object( + "ou=alt_users,%s" % CONF.ldap.user_tree_dn, aliased_users_ldif + ) - self.config_fixture.config(group='ldap', - query_scope='sub', - alias_dereferencing='never') + self.config_fixture.config( + group='ldap', query_scope='sub', alias_dereferencing='never' + ) PROVIDERS.identity_api = identity_ldap.Identity() - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, - 'alt_fake1') + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.get_user, + 'alt_fake1', + ) - self.config_fixture.config(group='ldap', - 
alias_dereferencing='searching') + self.config_fixture.config( + group='ldap', alias_dereferencing='searching' + ) PROVIDERS.identity_api = identity_ldap.Identity() user_ref = PROVIDERS.identity_api.get_user('alt_fake1') self.assertEqual('alt_fake1', user_ref['id']) @@ -131,7 +156,8 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity): for x in range(0, USER_COUNT): group_refs = PROVIDERS.identity_api.list_groups_for_user( - test_users[x]['id']) + test_users[x]['id'] + ) self.assertEqual(0, len(group_refs)) for x in range(0, GROUP_COUNT): @@ -140,53 +166,65 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity): test_groups.append(new_group) group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(x, len(group_refs)) PROVIDERS.identity_api.add_user_to_group( - positive_user['id'], - new_group['id']) + positive_user['id'], new_group['id'] + ) group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(x + 1, len(group_refs)) group_refs = PROVIDERS.identity_api.list_groups_for_user( - negative_user['id']) + negative_user['id'] + ) self.assertEqual(0, len(group_refs)) driver = PROVIDERS.identity_api._select_identity_driver( - CONF.identity.default_domain_id) + CONF.identity.default_domain_id + ) driver.group.ldap_filter = '(dn=xx)' group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(0, len(group_refs)) group_refs = PROVIDERS.identity_api.list_groups_for_user( - negative_user['id']) + negative_user['id'] + ) self.assertEqual(0, len(group_refs)) driver.group.ldap_filter = '(objectclass=*)' group_refs = PROVIDERS.identity_api.list_groups_for_user( - positive_user['id']) + positive_user['id'] + ) self.assertEqual(GROUP_COUNT, len(group_refs)) group_refs = PROVIDERS.identity_api.list_groups_for_user( - negative_user['id']) + negative_user['id'] + ) 
self.assertEqual(0, len(group_refs)) def test_user_enable_attribute_mask(self): self.config_fixture.config( group='ldap', user_enabled_emulation=False, - user_enabled_attribute='employeeType') + user_enabled_attribute='employeeType', + ) super(LiveLDAPIdentity, self).test_user_enable_attribute_mask() def test_create_project_case_sensitivity(self): # The attribute used for the live LDAP tests is case insensitive. def call_super(): - (super(LiveLDAPIdentity, self). - test_create_project_case_sensitivity()) + ( + super( + LiveLDAPIdentity, self + ).test_create_project_case_sensitivity() + ) self.assertRaises(exception.Conflict, call_super) @@ -202,7 +240,10 @@ class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity): # The description attribute doesn't allow an empty value. def call_super(): - (super(LiveLDAPIdentity, self). - test_project_update_missing_attrs_with_a_falsey_value()) + ( + super( + LiveLDAPIdentity, self + ).test_project_update_missing_attrs_with_a_falsey_value() + ) self.assertRaises(ldap.INVALID_SYNTAX, call_super) diff --git a/keystone/tests/unit/test_ldap_pool_livetest.py b/keystone/tests/unit/test_ldap_pool_livetest.py index b0817e9c1a..6fe887f196 100644 --- a/keystone/tests/unit/test_ldap_pool_livetest.py +++ b/keystone/tests/unit/test_ldap_pool_livetest.py @@ -28,8 +28,10 @@ CONF = keystone.conf.CONF PROVIDERS = provider_api.ProviderAPIs -class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin, - test_ldap_livetest.LiveLDAPIdentity): +class LiveLDAPPoolIdentity( + test_backend_ldap_pool.LdapPoolCommonTestMixin, + test_ldap_livetest.LiveLDAPIdentity, +): """Executes existing LDAP live test with pooled LDAP handler. Also executes common pool specific tests via Mixin class. 
@@ -49,10 +51,12 @@ class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin, def test_assert_connector_used_not_fake_ldap_pool(self): handler = ldap_common._get_connection(CONF.ldap.url, use_pool=True) - self.assertNotEqual(type(handler.Connector), - type(fakeldap.FakeLdapPool)) - self.assertEqual(type(ldappool.StateConnector), - type(handler.Connector)) + self.assertNotEqual( + type(handler.Connector), type(fakeldap.FakeLdapPool) + ) + self.assertEqual( + type(ldappool.StateConnector), type(handler.Connector) + ) def test_async_search_and_result3(self): self.config_fixture.config(group='ldap', page_size=1) @@ -98,27 +102,32 @@ class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin, self.test_password_change_with_pool() - self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - context={}, - user_id=self.user_sna['id'], - password=old_password) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + context={}, + user_id=self.user_sna['id'], + password=old_password, + ) def _create_user_and_authenticate(self, password): - user = unit.create_user(PROVIDERS.identity_api, - CONF.identity.default_domain_id, - password=password) + user = unit.create_user( + PROVIDERS.identity_api, + CONF.identity.default_domain_id, + password=password, + ) with self.make_request(): PROVIDERS.identity_api.authenticate( - user_id=user['id'], - password=password) + user_id=user['id'], password=password + ) return PROVIDERS.identity_api.get_user(user['id']) def _get_auth_conn_pool_cm(self): pool_url = ( - ldap_common.PooledLDAPHandler.auth_pool_prefix + CONF.ldap.url) + ldap_common.PooledLDAPHandler.auth_pool_prefix + CONF.ldap.url + ) return self.conn_pools[pool_url] def _do_password_change_for_one_user(self, password, new_password): @@ -167,12 +176,14 @@ class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin, return user3 def test_password_change_with_auth_pool_enabled_long_lifetime(self): - 
self.config_fixture.config(group='ldap', - auth_pool_connection_lifetime=600) + self.config_fixture.config( + group='ldap', auth_pool_connection_lifetime=600 + ) old_password = 'my_password' new_password = 'new_password' - user = self._do_password_change_for_one_user(old_password, - new_password) + user = self._do_password_change_for_one_user( + old_password, new_password + ) user.pop('password') # with long connection lifetime auth_pool can bind to old password @@ -181,21 +192,27 @@ class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin, # This can happen in multiple concurrent connections case only. with self.make_request(): user_ref = PROVIDERS.identity_api.authenticate( - user_id=user['id'], password=old_password) + user_id=user['id'], password=old_password + ) self.assertDictEqual(user, user_ref) def test_password_change_with_auth_pool_enabled_no_lifetime(self): - self.config_fixture.config(group='ldap', - auth_pool_connection_lifetime=0) + self.config_fixture.config( + group='ldap', auth_pool_connection_lifetime=0 + ) old_password = 'my_password' new_password = 'new_password' - user = self._do_password_change_for_one_user(old_password, - new_password) + user = self._do_password_change_for_one_user( + old_password, new_password + ) # now as connection lifetime is zero, so authentication # with old password will always fail. 
- self.assertRaises(AssertionError, - PROVIDERS.identity_api.authenticate, - context={}, user_id=user['id'], - password=old_password) + self.assertRaises( + AssertionError, + PROVIDERS.identity_api.authenticate, + context={}, + user_id=user['id'], + password=old_password, + ) diff --git a/keystone/tests/unit/test_ldap_tls_livetest.py b/keystone/tests/unit/test_ldap_tls_livetest.py index b1541c5d44..ebe1461117 100644 --- a/keystone/tests/unit/test_ldap_tls_livetest.py +++ b/keystone/tests/unit/test_ldap_tls_livetest.py @@ -46,14 +46,20 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity): return config_files def test_tls_certfile_demand_option(self): - self.config_fixture.config(group='ldap', - use_tls=True, - tls_cacertdir=None, - tls_req_cert='demand') + self.config_fixture.config( + group='ldap', + use_tls=True, + tls_cacertdir=None, + tls_req_cert='demand', + ) PROVIDERS.identity_api = identity.backends.ldap.Identity() - user = unit.create_user(PROVIDERS.identity_api, 'default', - name='fake1', password='fakepass1') + user = unit.create_user( + PROVIDERS.identity_api, + 'default', + name='fake1', + password='fakepass1', + ) user_ref = PROVIDERS.identity_api.get_user(user['id']) self.assertEqual(user['id'], user_ref['id']) @@ -62,21 +68,25 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity): PROVIDERS.identity_api.delete_user(user['id']) self.assertRaises( - exception.UserNotFound, - PROVIDERS.identity_api.get_user, - user['id'] + exception.UserNotFound, PROVIDERS.identity_api.get_user, user['id'] ) def test_tls_certdir_demand_option(self): - self.config_fixture.config(group='ldap', - use_tls=True, - tls_cacertdir=None, - tls_req_cert='demand') + self.config_fixture.config( + group='ldap', + use_tls=True, + tls_cacertdir=None, + tls_req_cert='demand', + ) PROVIDERS.identity_api = identity.backends.ldap.Identity() - user = unit.create_user(PROVIDERS.identity_api, 'default', - id='fake1', name='fake1', - password='fakepass1') + user = 
unit.create_user( + PROVIDERS.identity_api, + 'default', + id='fake1', + name='fake1', + password='fakepass1', + ) user_ref = PROVIDERS.identity_api.get_user('fake1') self.assertEqual('fake1', user_ref['id']) @@ -85,9 +95,7 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity): PROVIDERS.identity_api.delete_user('fake1') self.assertRaises( - exception.UserNotFound, - PROVIDERS.identity_api.get_user, - 'fake1' + exception.UserNotFound, PROVIDERS.identity_api.get_user, 'fake1' ) def test_tls_bad_certfile(self): @@ -96,7 +104,8 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity): use_tls=True, tls_req_cert='demand', tls_cacertfile='/etc/keystone/ssl/certs/mythicalcert.pem', - tls_cacertdir=None) + tls_cacertdir=None, + ) PROVIDERS.identity_api = identity.backends.ldap.Identity() user = unit.new_user_ref('default') @@ -108,7 +117,8 @@ class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity): use_tls=True, tls_cacertfile=None, tls_req_cert='demand', - tls_cacertdir='/etc/keystone/ssl/mythicalcertdir') + tls_cacertdir='/etc/keystone/ssl/mythicalcertdir', + ) PROVIDERS.identity_api = identity.backends.ldap.Identity() user = unit.new_user_ref('default') diff --git a/keystone/tests/unit/test_limits.py b/keystone/tests/unit/test_limits.py index 9163c346a2..b4e2dbb855 100644 --- a/keystone/tests/unit/test_limits.py +++ b/keystone/tests/unit/test_limits.py @@ -35,7 +35,7 @@ class LimitModelTestCase(test_v3.RestfulTestCase): 'type': 'object', 'properties': { 'name': {'type': 'string'}, - 'description': {'type': 'string'} + 'description': {'type': 'string'}, }, 'required': ['name', 'description'], 'additionalProperties': False, @@ -60,21 +60,23 @@ class LimitModelTestCase(test_v3.RestfulTestCase): 'description': ( 'Limit enforcement and validation does not take project ' 'hierarchy into consideration.' 
- ) + ), } } self.assertDictEqual(expected, model) def test_get_limit_model_without_token_fails(self): self.get( - '/limits/model', noauth=True, - expected_status=http.client.UNAUTHORIZED + '/limits/model', + noauth=True, + expected_status=http.client.UNAUTHORIZED, ) def test_head_limit_model_without_token_fails(self): self.head( - '/limits/model', noauth=True, - expected_status=http.client.UNAUTHORIZED + '/limits/model', + noauth=True, + expected_status=http.client.UNAUTHORIZED, ) @@ -98,25 +100,35 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): self.region2 = response.json_body['region'] self.region_id2 = self.region2['id'] - service_ref = {'service': { - 'name': uuid.uuid4().hex, - 'enabled': True, - 'type': 'type2'}} + service_ref = { + 'service': { + 'name': uuid.uuid4().hex, + 'enabled': True, + 'type': 'type2', + } + } response = self.post('/services', body=service_ref) self.service2 = response.json_body['service'] self.service_id2 = self.service2['id'] def test_create_registered_limit(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, region_id=self.region_id + ) r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) registered_limits = r.result['registered_limits'] - for key in ['service_id', 'region_id', 'resource_name', - 'default_limit', 'description']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'default_limit', + 'description', + ]: self.assertEqual(registered_limits[0][key], ref[key]) def test_create_registered_limit_without_region(self): @@ -125,38 +137,49 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + 
expected_status=http.client.CREATED, + ) registered_limits = r.result['registered_limits'] for key in ['service_id', 'resource_name', 'default_limit']: self.assertEqual(registered_limits[0][key], ref[key]) self.assertIsNone(registered_limits[0].get('region_id')) def test_create_registered_without_description(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, region_id=self.region_id + ) ref.pop('description') r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) registered_limits = r.result['registered_limits'] - for key in ['service_id', 'region_id', 'resource_name', - 'default_limit']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'default_limit', + ]: self.assertEqual(registered_limits[0][key], ref[key]) self.assertIsNone(registered_limits[0]['description']) def test_create_multi_registered_limit(self): - ref1 = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - ref2 = unit.new_registered_limit_ref(service_id=self.service_id, - resource_name='snapshot') + ref1 = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + ref2 = unit.new_registered_limit_ref( + service_id=self.service_id, resource_name='snapshot' + ) r = self.post( '/registered_limits', body={'registered_limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) registered_limits = r.result['registered_limits'] for key in ['service_id', 'resource_name', 'default_limit']: self.assertEqual(registered_limits[0][key], ref1[key]) @@ -165,24 +188,28 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): 
self.assertIsNone(registered_limits[1].get('region_id')) def test_create_registered_limit_return_count(self): - ref1 = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id) + ref1 = unit.new_registered_limit_ref( + service_id=self.service_id, region_id=self.region_id + ) r = self.post( '/registered_limits', body={'registered_limits': [ref1]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) registered_limits = r.result['registered_limits'] self.assertEqual(1, len(registered_limits)) - ref2 = unit.new_registered_limit_ref(service_id=self.service_id2, - region_id=self.region_id2) + ref2 = unit.new_registered_limit_ref( + service_id=self.service_id2, region_id=self.region_id2 + ) ref3 = unit.new_registered_limit_ref(service_id=self.service_id2) r = self.post( '/registered_limits', body={'registered_limits': [ref2, ref3]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) registered_limits = r.result['registered_limits'] self.assertEqual(2, len(registered_limits)) @@ -196,63 +223,75 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): '/registered_limits', body={'registered_limits': [input_limit]}, token=self.system_admin_token, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_create_registered_limit_duplicate(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, region_id=self.region_id + ) self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CONFLICT) + 
expected_status=http.client.CONFLICT, + ) def test_update_registered_limit(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - default_limit=10) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + default_limit=10, + ) r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) update_ref = { 'service_id': self.service_id2, 'region_id': self.region_id2, 'resource_name': 'snapshot', 'default_limit': 5, - 'description': 'test description' + 'description': 'test description', } r = self.patch( '/registered_limits/%s' % r.result['registered_limits'][0]['id'], body={'registered_limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) new_registered_limits = r.result['registered_limit'] self.assertEqual(new_registered_limits['service_id'], self.service_id2) self.assertEqual(new_registered_limits['region_id'], self.region_id2) self.assertEqual(new_registered_limits['resource_name'], 'snapshot') self.assertEqual(new_registered_limits['default_limit'], 5) - self.assertEqual(new_registered_limits['description'], - 'test description') + self.assertEqual( + new_registered_limits['description'], 'test description' + ) def test_update_registered_limit_region_failed(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - resource_name='volume', - default_limit=10, - description='test description') + ref = unit.new_registered_limit_ref( + service_id=self.service_id, + resource_name='volume', + default_limit=10, + description='test description', + ) r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, 
+ ) update_ref = { 'region_id': self.region_id, } @@ -261,7 +300,8 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): '/registered_limits/%s' % registered_limit_id, body={'registered_limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) new_registered_limits = r.result['registered_limit'] self.assertEqual(self.region_id, new_registered_limits['region_id']) @@ -270,83 +310,94 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): '/registered_limits/%s' % registered_limit_id, body={'registered_limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_update_registered_limit_description(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - default_limit=10) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + default_limit=10, + ) r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) - update_ref = { - 'description': 'test description' - } + expected_status=http.client.CREATED, + ) + update_ref = {'description': 'test description'} registered_limit_id = r.result['registered_limits'][0]['id'] r = self.patch( '/registered_limits/%s' % registered_limit_id, body={'registered_limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) new_registered_limits = r.result['registered_limit'] - self.assertEqual(new_registered_limits['description'], - 'test description') + self.assertEqual( + new_registered_limits['description'], 'test description' + ) update_ref['description'] = '' r = self.patch( '/registered_limits/%s' % registered_limit_id, body={'registered_limit': update_ref}, 
token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) new_registered_limits = r.result['registered_limit'] self.assertEqual(new_registered_limits['description'], '') def test_update_registered_limit_region_id_to_none(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - default_limit=10) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + default_limit=10, + ) r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) - update_ref = { - 'region_id': None - } + expected_status=http.client.CREATED, + ) + update_ref = {'region_id': None} registered_limit_id = r.result['registered_limits'][0]['id'] r = self.patch( '/registered_limits/%s' % registered_limit_id, body={'registered_limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) self.assertIsNone(r.result['registered_limit']['region_id']) def test_update_registered_limit_region_id_to_none_conflict(self): - ref1 = unit.new_registered_limit_ref(service_id=self.service_id, - resource_name='volume', - default_limit=10) - ref2 = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - default_limit=10) + ref1 = unit.new_registered_limit_ref( + service_id=self.service_id, + resource_name='volume', + default_limit=10, + ) + ref2 = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + default_limit=10, + ) self.post( '/registered_limits', body={'registered_limits': [ref1]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) r = self.post( '/registered_limits', body={'registered_limits': [ref2]}, 
token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - update_ref = { - 'region_id': None - } + update_ref = {'region_id': None} registered_limit_id = r.result['registered_limits'][0]['id'] # There is a registered limit with "service_id=self.service_id, # region_id=None" already. So update ref2's region_id to None will @@ -355,31 +406,36 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): '/registered_limits/%s' % registered_limit_id, body={'registered_limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.CONFLICT) + expected_status=http.client.CONFLICT, + ) def test_update_registered_limit_not_found(self): update_ref = { 'service_id': self.service_id, 'region_id': self.region_id, 'resource_name': 'snapshot', - 'default_limit': 5 + 'default_limit': 5, } self.patch( '/registered_limits/%s' % uuid.uuid4().hex, body={'registered_limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.NOT_FOUND) + expected_status=http.client.NOT_FOUND, + ) def test_update_registered_limit_with_invalid_input(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - default_limit=10) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + default_limit=10, + ) r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) reg_id = r.result['registered_limits'][0]['id'] update_ref1 = unit.new_registered_limit_ref(service_id='fake_id') @@ -387,72 +443,90 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): update_ref3 = unit.new_registered_limit_ref(resource_name=123) update_ref4 = unit.new_registered_limit_ref(region_id='fake_region') update_ref5 = unit.new_registered_limit_ref(description=123) - for 
input_limit in [update_ref1, update_ref2, update_ref3, - update_ref4, update_ref5]: + for input_limit in [ + update_ref1, + update_ref2, + update_ref3, + update_ref4, + update_ref5, + ]: self.patch( '/registered_limits/%s' % reg_id, body={'registered_limit': input_limit}, token=self.system_admin_token, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_update_registered_limit_with_referenced_limit(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - default_limit=10) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + default_limit=10, + ) r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') + ref = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) update_ref = { 'service_id': self.service_id2, 'region_id': self.region_id2, 'resource_name': 'snapshot', - 'default_limit': 5 + 'default_limit': 5, } self.patch( '/registered_limits/%s' % r.result['registered_limits'][0]['id'], body={'registered_limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_list_registered_limit(self): - r = self.get( - '/registered_limits', - expected_status=http.client.OK) + r = self.get('/registered_limits', expected_status=http.client.OK) self.assertEqual([], r.result.get('registered_limits')) - 
ref1 = unit.new_registered_limit_ref(service_id=self.service_id, - resource_name='test_resource', - region_id=self.region_id) - ref2 = unit.new_registered_limit_ref(service_id=self.service_id2, - resource_name='test_resource', - region_id=self.region_id2) + ref1 = unit.new_registered_limit_ref( + service_id=self.service_id, + resource_name='test_resource', + region_id=self.region_id, + ) + ref2 = unit.new_registered_limit_ref( + service_id=self.service_id2, + resource_name='test_resource', + region_id=self.region_id2, + ) r = self.post( '/registered_limits', body={'registered_limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) id1 = r.result['registered_limits'][0]['id'] - r = self.get( - '/registered_limits', - expected_status=http.client.OK) + r = self.get('/registered_limits', expected_status=http.client.OK) registered_limits = r.result['registered_limits'] self.assertEqual(len(registered_limits), 2) - for key in ['service_id', 'region_id', 'resource_name', - 'default_limit']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'default_limit', + ]: if registered_limits[0]['id'] == id1: self.assertEqual(registered_limits[0][key], ref1[key]) self.assertEqual(registered_limits[1][key], ref2[key]) @@ -462,97 +536,128 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase): r = self.get( '/registered_limits?service_id=%s' % self.service_id, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) registered_limits = r.result['registered_limits'] self.assertEqual(len(registered_limits), 1) - for key in ['service_id', 'region_id', 'resource_name', - 'default_limit']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'default_limit', + ]: self.assertEqual(registered_limits[0][key], ref1[key]) r = self.get( '/registered_limits?region_id=%s' % self.region_id2, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) 
registered_limits = r.result['registered_limits'] self.assertEqual(len(registered_limits), 1) - for key in ['service_id', 'region_id', 'resource_name', - 'default_limit']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'default_limit', + ]: self.assertEqual(registered_limits[0][key], ref2[key]) r = self.get( '/registered_limits?resource_name=test_resource', - expected_status=http.client.OK) + expected_status=http.client.OK, + ) registered_limits = r.result['registered_limits'] self.assertEqual(len(registered_limits), 2) def test_show_registered_limit(self): - ref1 = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id) - ref2 = unit.new_registered_limit_ref(service_id=self.service_id2, - region_id=self.region_id2) + ref1 = unit.new_registered_limit_ref( + service_id=self.service_id, region_id=self.region_id + ) + ref2 = unit.new_registered_limit_ref( + service_id=self.service_id2, region_id=self.region_id2 + ) r = self.post( '/registered_limits', body={'registered_limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) id1 = r.result['registered_limits'][0]['id'] self.get( - '/registered_limits/fake_id', - expected_status=http.client.NOT_FOUND) + '/registered_limits/fake_id', expected_status=http.client.NOT_FOUND + ) r = self.get( - '/registered_limits/%s' % id1, - expected_status=http.client.OK) + '/registered_limits/%s' % id1, expected_status=http.client.OK + ) registered_limit = r.result['registered_limit'] - for key in ['service_id', 'region_id', 'resource_name', - 'default_limit', 'description']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'default_limit', + 'description', + ]: self.assertEqual(registered_limit[key], ref1[key]) def test_delete_registered_limit(self): - ref1 = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id) - ref2 = 
unit.new_registered_limit_ref(service_id=self.service_id2, - region_id=self.region_id2) + ref1 = unit.new_registered_limit_ref( + service_id=self.service_id, region_id=self.region_id + ) + ref2 = unit.new_registered_limit_ref( + service_id=self.service_id2, region_id=self.region_id2 + ) r = self.post( '/registered_limits', body={'registered_limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) id1 = r.result['registered_limits'][0]['id'] - self.delete('/registered_limits/%s' % id1, - token=self.system_admin_token, - expected_status=http.client.NO_CONTENT) - self.delete('/registered_limits/fake_id', - token=self.system_admin_token, - expected_status=http.client.NOT_FOUND) - r = self.get( - '/registered_limits', - expected_status=http.client.OK) + self.delete( + '/registered_limits/%s' % id1, + token=self.system_admin_token, + expected_status=http.client.NO_CONTENT, + ) + self.delete( + '/registered_limits/fake_id', + token=self.system_admin_token, + expected_status=http.client.NOT_FOUND, + ) + r = self.get('/registered_limits', expected_status=http.client.OK) registered_limits = r.result['registered_limits'] self.assertEqual(len(registered_limits), 1) def test_delete_registered_limit_with_referenced_limit(self): - ref = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - default_limit=10) + ref = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + default_limit=10, + ) r = self.post( '/registered_limits', body={'registered_limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') + ref = unit.new_limit_ref( + project_id=self.project_id, + 
service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) id = r.result['registered_limits'][0]['id'] - self.delete('/registered_limits/%s' % id, - expected_status=http.client.FORBIDDEN) + self.delete( + '/registered_limits/%s' % id, expected_status=http.client.FORBIDDEN + ) class LimitsTestCase(test_v3.RestfulTestCase): @@ -591,27 +696,36 @@ class LimitsTestCase(test_v3.RestfulTestCase): self.region2 = response.json_body['region'] self.region_id2 = self.region2['id'] - service_ref = {'service': { - 'name': uuid.uuid4().hex, - 'enabled': True, - 'type': 'type2'}} + service_ref = { + 'service': { + 'name': uuid.uuid4().hex, + 'enabled': True, + 'type': 'type2', + } + } response = self.post('/services', body=service_ref) self.service2 = response.json_body['service'] self.service_id2 = self.service2['id'] - ref1 = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - ref2 = unit.new_registered_limit_ref(service_id=self.service_id2, - resource_name='snapshot') - ref3 = unit.new_registered_limit_ref(service_id=self.service_id, - region_id=self.region_id, - resource_name='backup') + ref1 = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + ref2 = unit.new_registered_limit_ref( + service_id=self.service_id2, resource_name='snapshot' + ) + ref3 = unit.new_registered_limit_ref( + service_id=self.service_id, + region_id=self.region_id, + resource_name='backup', + ) self.post( '/registered_limits', body={'registered_limits': [ref1, ref2, ref3]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) # Create more assignments, all are: # @@ -622,8 +736,9 @@ class LimitsTestCase(test_v3.RestfulTestCase): # 
self.user -- admin -- system self.project_2 = unit.new_project_ref(domain_id=self.domain_id) self.project_2_id = self.project_2['id'] - PROVIDERS.resource_api.create_project(self.project_2_id, - self.project_2) + PROVIDERS.resource_api.create_project( + self.project_2_id, self.project_2 + ) self.domain_2 = unit.new_domain_ref() self.domain_2_id = self.domain_2['id'] @@ -634,59 +749,84 @@ class LimitsTestCase(test_v3.RestfulTestCase): PROVIDERS.role_api.create_role(self.role_2_id, self.role_2) PROVIDERS.assignment_api.create_grant( - self.role_2_id, user_id=self.user_id, project_id=self.project_2_id) + self.role_2_id, user_id=self.user_id, project_id=self.project_2_id + ) PROVIDERS.assignment_api.create_grant( - self.role_id, user_id=self.user_id, domain_id=self.domain_id) + self.role_id, user_id=self.user_id, domain_id=self.domain_id + ) PROVIDERS.assignment_api.create_grant( - self.role_2_id, user_id=self.user_id, domain_id=self.domain_2_id) + self.role_2_id, user_id=self.user_id, domain_id=self.domain_2_id + ) PROVIDERS.assignment_api.create_system_grant_for_user( - self.user_id, self.role_id) + self.user_id, self.role_id + ) def test_create_project_limit(self): - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') + ref = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) r = self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) limits = r.result['limits'] self.assertIsNotNone(limits[0]['id']) self.assertIsNone(limits[0]['domain_id']) - for key in ['service_id', 'region_id', 'resource_name', - 'resource_limit', 'description', 'project_id']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'resource_limit', + 'description', + 'project_id', + ]: 
self.assertEqual(limits[0][key], ref[key]) def test_create_domain_limit(self): - ref = unit.new_limit_ref(domain_id=self.domain_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') + ref = unit.new_limit_ref( + domain_id=self.domain_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) r = self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) limits = r.result['limits'] self.assertIsNotNone(limits[0]['id']) self.assertIsNone(limits[0]['project_id']) - for key in ['service_id', 'region_id', 'resource_name', - 'resource_limit', 'description', 'domain_id']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'resource_limit', + 'description', + 'domain_id', + ]: self.assertEqual(limits[0][key], ref[key]) def test_create_limit_without_region(self): - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id2, - resource_name='snapshot') + ref = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id2, + resource_name='snapshot', + ) r = self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) limits = r.result['limits'] self.assertIsNotNone(limits[0]['id']) @@ -696,49 +836,64 @@ class LimitsTestCase(test_v3.RestfulTestCase): self.assertIsNone(limits[0].get('region_id')) def test_create_limit_without_description(self): - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') + ref = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) ref.pop('description') r = self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - 
expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) limits = r.result['limits'] self.assertIsNotNone(limits[0]['id']) self.assertIsNotNone(limits[0]['project_id']) - for key in ['service_id', 'region_id', 'resource_name', - 'resource_limit']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'resource_limit', + ]: self.assertEqual(limits[0][key], ref[key]) self.assertIsNone(limits[0]['description']) def test_create_limit_with_domain_as_project(self): - ref = unit.new_limit_ref(project_id=self.domain_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - r = self.post('/limits', body={'limits': [ref]}, - token=self.system_admin_token) + ref = unit.new_limit_ref( + project_id=self.domain_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + r = self.post( + '/limits', body={'limits': [ref]}, token=self.system_admin_token + ) limits = r.result['limits'] self.assertIsNone(limits[0]['project_id']) self.assertEqual(self.domain_id, limits[0]['domain_id']) def test_create_multi_limit(self): - ref1 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - ref2 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id2, - resource_name='snapshot') + ref1 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + ref2 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id2, + resource_name='snapshot', + ) r = self.post( '/limits', body={'limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) limits = r.result['limits'] for key in ['service_id', 'resource_name', 'resource_limit']: self.assertEqual(limits[0][key], ref1[key]) @@ -747,159 +902,184 @@ class 
LimitsTestCase(test_v3.RestfulTestCase): self.assertIsNone(limits[1].get('region_id')) def test_create_limit_return_count(self): - ref1 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') + ref1 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) r = self.post( '/limits', body={'limits': [ref1]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) limits = r.result['limits'] self.assertEqual(1, len(limits)) - ref2 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id2, - resource_name='snapshot') - ref3 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='backup') + ref2 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id2, + resource_name='snapshot', + ) + ref3 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='backup', + ) r = self.post( '/limits', body={'limits': [ref2, ref3]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) limits = r.result['limits'] self.assertEqual(2, len(limits)) def test_create_limit_with_invalid_input(self): - ref1 = unit.new_limit_ref(project_id=self.project_id, - resource_limit='not_int') - ref2 = unit.new_limit_ref(project_id=self.project_id, - resource_name=123) - ref3 = unit.new_limit_ref(project_id=self.project_id, - region_id='fake_region') + ref1 = unit.new_limit_ref( + project_id=self.project_id, resource_limit='not_int' + ) + ref2 = unit.new_limit_ref( + project_id=self.project_id, resource_name=123 + ) + ref3 = unit.new_limit_ref( + project_id=self.project_id, region_id='fake_region' + ) for input_limit in [ref1, ref2, ref3]: self.post( 
'/limits', body={'limits': [input_limit]}, token=self.system_admin_token, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_create_limit_duplicate(self): - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') + ref = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CONFLICT) + expected_status=http.client.CONFLICT, + ) def test_create_limit_without_reference_registered_limit(self): - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id2, - resource_name='volume') + ref = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id2, + resource_name='volume', + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_update_limit(self): - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=10) + ref = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=10, + ) r = self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) - update_ref = { - 'resource_limit': 5, - 'description': 'test description' - } + expected_status=http.client.CREATED, + ) + update_ref = {'resource_limit': 5, 'description': 'test description'} r = 
self.patch( '/limits/%s' % r.result['limits'][0]['id'], body={'limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) new_limits = r.result['limit'] self.assertEqual(new_limits['resource_limit'], 5) self.assertEqual(new_limits['description'], 'test description') def test_update_limit_not_found(self): - update_ref = { - 'resource_limit': 5 - } + update_ref = {'resource_limit': 5} self.patch( '/limits/%s' % uuid.uuid4().hex, body={'limit': update_ref}, token=self.system_admin_token, - expected_status=http.client.NOT_FOUND) + expected_status=http.client.NOT_FOUND, + ) def test_update_limit_with_invalid_input(self): - ref = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=10) + ref = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=10, + ) r = self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) limit_id = r.result['limits'][0]['id'] - invalid_resource_limit_update = { - 'resource_limit': 'not_int' - } - invalid_description_update = { - 'description': 123 - } - for input_limit in [invalid_resource_limit_update, - invalid_description_update]: + invalid_resource_limit_update = {'resource_limit': 'not_int'} + invalid_description_update = {'description': 123} + for input_limit in [ + invalid_resource_limit_update, + invalid_description_update, + ]: self.patch( '/limits/%s' % limit_id, body={'limit': input_limit}, token=self.system_admin_token, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_list_limit(self): r = self.get( '/limits', token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) self.assertEqual([], 
r.result.get('limits')) - ref1 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - ref2 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id2, - resource_name='snapshot') + ref1 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + ref2 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id2, + resource_name='snapshot', + ) r = self.post( '/limits', body={'limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) id1 = r.result['limits'][0]['id'] - r = self.get( - '/limits', - expected_status=http.client.OK) + r = self.get('/limits', expected_status=http.client.OK) limits = r.result['limits'] self.assertEqual(len(limits), 2) if limits[0]['id'] == id1: @@ -917,7 +1097,8 @@ class LimitsTestCase(test_v3.RestfulTestCase): r = self.get( '/limits?service_id=%s' % self.service_id2, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) limits = r.result['limits'] self.assertEqual(len(limits), 1) for key in ['service_id', 'resource_name', 'resource_limit']: @@ -925,38 +1106,51 @@ class LimitsTestCase(test_v3.RestfulTestCase): r = self.get( '/limits?region_id=%s' % self.region_id, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) limits = r.result['limits'] self.assertEqual(len(limits), 1) - for key in ['service_id', 'region_id', 'resource_name', - 'resource_limit']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'resource_limit', + ]: self.assertEqual(limits[0][key], ref1[key]) r = self.get( - '/limits?resource_name=volume', - expected_status=http.client.OK) + '/limits?resource_name=volume', expected_status=http.client.OK + ) limits = r.result['limits'] self.assertEqual(len(limits), 1) - for key in ['service_id', 
'region_id', 'resource_name', - 'resource_limit']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'resource_limit', + ]: self.assertEqual(limits[0][key], ref1[key]) def test_list_limit_with_project_id_filter(self): # create two limit in different projects for test. - self.config_fixture.config(group='oslo_policy', - enforce_scope=True) - ref1 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - ref2 = unit.new_limit_ref(project_id=self.project_2_id, - service_id=self.service_id2, - resource_name='snapshot') + self.config_fixture.config(group='oslo_policy', enforce_scope=True) + ref1 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + ref2 = unit.new_limit_ref( + project_id=self.project_2_id, + service_id=self.service_id2, + resource_name='snapshot', + ) self.post( '/limits', body={'limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) # non system scoped request will get the limits in its project. 
r = self.get('/limits', expected_status=http.client.OK) @@ -965,10 +1159,14 @@ class LimitsTestCase(test_v3.RestfulTestCase): self.assertEqual(self.project_id, limits[0]['project_id']) r = self.get( - '/limits', expected_status=http.client.OK, + '/limits', + expected_status=http.client.OK, auth=self.build_authentication_request( - user_id=self.user['id'], password=self.user['password'], - project_id=self.project_2_id)) + user_id=self.user['id'], + password=self.user['password'], + project_id=self.project_2_id, + ), + ) limits = r.result['limits'] self.assertEqual(1, len(limits)) self.assertEqual(self.project_2_id, limits[0]['project_id']) @@ -976,7 +1174,8 @@ class LimitsTestCase(test_v3.RestfulTestCase): # any project user can filter by their own project r = self.get( '/limits?project_id=%s' % self.project_id, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) limits = r.result['limits'] self.assertEqual(1, len(limits)) self.assertEqual(self.project_id, limits[0]['project_id']) @@ -985,7 +1184,7 @@ class LimitsTestCase(test_v3.RestfulTestCase): r = self.get( '/limits?project_id=%s' % self.project_id, expected_status=http.client.OK, - token=self.system_admin_token + token=self.system_admin_token, ) limits = r.result['limits'] self.assertEqual(1, len(limits)) @@ -993,34 +1192,47 @@ class LimitsTestCase(test_v3.RestfulTestCase): def test_list_limit_with_domain_id_filter(self): # create two limit in different domains for test. 
- ref1 = unit.new_limit_ref(domain_id=self.domain_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - ref2 = unit.new_limit_ref(domain_id=self.domain_2_id, - service_id=self.service_id2, - resource_name='snapshot') + ref1 = unit.new_limit_ref( + domain_id=self.domain_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + ref2 = unit.new_limit_ref( + domain_id=self.domain_2_id, + service_id=self.service_id2, + resource_name='snapshot', + ) self.post( '/limits', body={'limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) # non system scoped request will get the limits in its domain. r = self.get( - '/limits', expected_status=http.client.OK, + '/limits', + expected_status=http.client.OK, auth=self.build_authentication_request( - user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain_id)) + user_id=self.user['id'], + password=self.user['password'], + domain_id=self.domain_id, + ), + ) limits = r.result['limits'] self.assertEqual(1, len(limits)) self.assertEqual(self.domain_id, limits[0]['domain_id']) r = self.get( - '/limits', expected_status=http.client.OK, + '/limits', + expected_status=http.client.OK, auth=self.build_authentication_request( - user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain_2_id)) + user_id=self.user['id'], + password=self.user['password'], + domain_id=self.domain_2_id, + ), + ) limits = r.result['limits'] self.assertEqual(1, len(limits)) self.assertEqual(self.domain_2_id, limits[0]['domain_id']) @@ -1029,7 +1241,8 @@ class LimitsTestCase(test_v3.RestfulTestCase): # will return an empty list. 
r = self.get( '/limits?domain_id=%s' % self.domain_id, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) limits = r.result['limits'] self.assertEqual(0, len(limits)) @@ -1038,88 +1251,124 @@ class LimitsTestCase(test_v3.RestfulTestCase): '/limits?domain_id=%s' % self.domain_id, expected_status=http.client.OK, auth=self.build_authentication_request( - user_id=self.user['id'], password=self.user['password'], - system=True)) + user_id=self.user['id'], + password=self.user['password'], + system=True, + ), + ) limits = r.result['limits'] self.assertEqual(1, len(limits)) self.assertEqual(self.domain_id, limits[0]['domain_id']) def test_show_project_limit(self): - ref1 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - ref2 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id2, - resource_name='snapshot') + ref1 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + ref2 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id2, + resource_name='snapshot', + ) r = self.post( '/limits', body={'limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) if r.result['limits'][0]['resource_name'] == 'volume': id1 = r.result['limits'][0]['id'] else: id1 = r.result['limits'][1]['id'] - self.get('/limits/fake_id', - token=self.system_admin_token, - expected_status=http.client.NOT_FOUND) - r = self.get('/limits/%s' % id1, - expected_status=http.client.OK) + self.get( + '/limits/fake_id', + token=self.system_admin_token, + expected_status=http.client.NOT_FOUND, + ) + r = self.get('/limits/%s' % id1, expected_status=http.client.OK) limit = r.result['limit'] self.assertIsNone(limit['domain_id']) - for key in ['service_id', 'region_id', 'resource_name', - 
'resource_limit', 'description', 'project_id']: + for key in [ + 'service_id', + 'region_id', + 'resource_name', + 'resource_limit', + 'description', + 'project_id', + ]: self.assertEqual(limit[key], ref1[key]) def test_show_domain_limit(self): - ref1 = unit.new_limit_ref(domain_id=self.domain_id, - service_id=self.service_id2, - resource_name='snapshot') + ref1 = unit.new_limit_ref( + domain_id=self.domain_id, + service_id=self.service_id2, + resource_name='snapshot', + ) r = self.post( '/limits', body={'limits': [ref1]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) id1 = r.result['limits'][0]['id'] - r = self.get('/limits/%s' % id1, - expected_status=http.client.OK, - auth=self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain_id)) + r = self.get( + '/limits/%s' % id1, + expected_status=http.client.OK, + auth=self.build_authentication_request( + user_id=self.user['id'], + password=self.user['password'], + domain_id=self.domain_id, + ), + ) limit = r.result['limit'] self.assertIsNone(limit['project_id']) self.assertIsNone(limit['region_id']) - for key in ['service_id', 'resource_name', 'resource_limit', - 'description', 'domain_id']: + for key in [ + 'service_id', + 'resource_name', + 'resource_limit', + 'description', + 'domain_id', + ]: self.assertEqual(limit[key], ref1[key]) def test_delete_limit(self): - ref1 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume') - ref2 = unit.new_limit_ref(project_id=self.project_id, - service_id=self.service_id2, - resource_name='snapshot') + ref1 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + ) + ref2 = unit.new_limit_ref( + project_id=self.project_id, + service_id=self.service_id2, + resource_name='snapshot', + ) r = 
self.post( '/limits', body={'limits': [ref1, ref2]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) id1 = r.result['limits'][0]['id'] - self.delete('/limits/%s' % id1, - token=self.system_admin_token, - expected_status=http.client.NO_CONTENT) - self.delete('/limits/fake_id', - token=self.system_admin_token, - expected_status=http.client.NOT_FOUND) + self.delete( + '/limits/%s' % id1, + token=self.system_admin_token, + expected_status=http.client.NO_CONTENT, + ) + self.delete( + '/limits/fake_id', + token=self.system_admin_token, + expected_status=http.client.NOT_FOUND, + ) r = self.get( '/limits', token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) limits = r.result['limits'] self.assertEqual(len(limits), 1) @@ -1143,69 +1392,99 @@ class StrictTwoLevelLimitsTestCase(LimitsTestCase): domain_ref = {'domain': {'name': 'A', 'enabled': True}} response = self.post('/domains', body=domain_ref) self.domain_A = response.json_body['domain'] - project_ref = {'project': {'name': 'B', 'enabled': True, - 'domain_id': self.domain_A['id']}} + project_ref = { + 'project': { + 'name': 'B', + 'enabled': True, + 'domain_id': self.domain_A['id'], + } + } response = self.post('/projects', body=project_ref) self.project_B = response.json_body['project'] - project_ref = {'project': {'name': 'C', 'enabled': True, - 'domain_id': self.domain_A['id']}} + project_ref = { + 'project': { + 'name': 'C', + 'enabled': True, + 'domain_id': self.domain_A['id'], + } + } response = self.post('/projects', body=project_ref) self.project_C = response.json_body['project'] domain_ref = {'domain': {'name': 'D', 'enabled': True}} response = self.post('/domains', body=domain_ref) self.domain_D = response.json_body['domain'] - project_ref = {'project': {'name': 'E', 'enabled': True, - 'domain_id': self.domain_D['id']}} + project_ref = { + 'project': { + 'name': 'E', + 'enabled': True, + 
'domain_id': self.domain_D['id'], + } + } response = self.post('/projects', body=project_ref) self.project_E = response.json_body['project'] - project_ref = {'project': {'name': 'F', 'enabled': True, - 'domain_id': self.domain_D['id']}} + project_ref = { + 'project': { + 'name': 'F', + 'enabled': True, + 'domain_id': self.domain_D['id'], + } + } response = self.post('/projects', body=project_ref) self.project_F = response.json_body['project'] def config_overrides(self): super(StrictTwoLevelLimitsTestCase, self).config_overrides() - self.config_fixture.config(group='unified_limit', - enforcement_model='strict_two_level') + self.config_fixture.config( + group='unified_limit', enforcement_model='strict_two_level' + ) def test_create_child_limit(self): # when A is 20, success to create B to 15, C to 18. # A,20 A,20 # / \ --> / \ # B C B,15 C,18 - ref = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=20) + ref = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=20, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=15) + ref = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=15, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - 
resource_limit=18) + ref = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=18, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) def test_create_child_limit_break_hierarchical_tree(self): # when A is 20, success to create B to 15, but fail to create C to 21. @@ -1216,38 +1495,47 @@ class StrictTwoLevelLimitsTestCase(LimitsTestCase): # A,20 A,20 # / \ -/-> / \ # B,15 C B,15 C,21 - ref = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=20) + ref = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=20, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=15) + ref = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=15, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=21) + ref = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=21, + ) self.post( '/limits', body={'limits': [ref]}, 
token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_create_child_with_default_parent(self): # If A is not set, the default value is 10 (from registered limit). @@ -1259,122 +1547,153 @@ class StrictTwoLevelLimitsTestCase(LimitsTestCase): # A(10) A(10) # / \ -/-> / \ # B,5 C B,5 C,11 - ref = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=5) + ref = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=5, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=11) + ref = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=11, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_create_parent_limit(self): # When B is 9 , success to set A to 12 # A A,12 # / \ --> / \ # B,9 C B,9 C - ref = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=9) + ref = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=9, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = 
unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=12) + ref = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=12, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) def test_create_parent_limit_break_hierarchical_tree(self): # When B is 9 , fail to set A to 8 # A A,8 # / \ -/-> / \ # B,9 C B,9 C - ref = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=9) + ref = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=9, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=8) + ref = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=8, + ) self.post( '/limits', body={'limits': [ref]}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_create_multi_limits(self): # success to create a tree in one request like: # A,12 D,9 # / \ / \ # B,9 C,5 E,5 F,4 - ref_A = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=12) - ref_B = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - 
region_id=self.region_id, - resource_name='volume', - resource_limit=9) - ref_C = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=5) - ref_D = unit.new_limit_ref(domain_id=self.domain_D['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=9) - ref_E = unit.new_limit_ref(project_id=self.project_E['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=5) - ref_F = unit.new_limit_ref(project_id=self.project_F['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=4) + ref_A = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=12, + ) + ref_B = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=9, + ) + ref_C = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=5, + ) + ref_D = unit.new_limit_ref( + domain_id=self.domain_D['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=9, + ) + ref_E = unit.new_limit_ref( + project_id=self.project_E['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=5, + ) + ref_F = unit.new_limit_ref( + project_id=self.project_F['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=4, + ) self.post( '/limits', body={'limits': [ref_A, ref_B, ref_C, ref_D, ref_E, ref_F]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) def 
test_create_multi_limits_invalid_input(self): # fail to create a tree in one request like: @@ -1382,41 +1701,54 @@ class StrictTwoLevelLimitsTestCase(LimitsTestCase): # / \ / \ # B,9 C,5 E,5 F,10 # because F will break the second limit tree. - ref_A = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=12) - ref_B = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=9) - ref_C = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=5) - ref_D = unit.new_limit_ref(domain_id=self.domain_D['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=9) - ref_E = unit.new_limit_ref(project_id=self.project_E['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=5) - ref_F = unit.new_limit_ref(project_id=self.project_F['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=10) + ref_A = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=12, + ) + ref_B = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=9, + ) + ref_C = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=5, + ) + ref_D = unit.new_limit_ref( + domain_id=self.domain_D['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=9, + ) + ref_E = unit.new_limit_ref( + project_id=self.project_E['id'], + 
service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=5, + ) + ref_F = unit.new_limit_ref( + project_id=self.project_F['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=10, + ) self.post( '/limits', body={'limits': [ref_A, ref_B, ref_C, ref_D, ref_E, ref_F]}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_create_multi_limits_break_hierarchical_tree(self): # when there is some hierarchical_trees already like: @@ -1428,118 +1760,148 @@ class StrictTwoLevelLimitsTestCase(LimitsTestCase): # / \ / \ # B,9 C,5 E,5 F # because D will break the second limit tree. - ref_A = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=12) - ref_B = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=9) - ref_E = unit.new_limit_ref(project_id=self.project_E['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=5) + ref_A = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=12, + ) + ref_B = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=9, + ) + ref_E = unit.new_limit_ref( + project_id=self.project_E['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=5, + ) self.post( '/limits', body={'limits': [ref_A, ref_B, ref_E]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) - ref_C = unit.new_limit_ref(project_id=self.project_C['id'], - 
service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=5) - ref_D = unit.new_limit_ref(domain_id=self.domain_D['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=4) + ref_C = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=5, + ) + ref_D = unit.new_limit_ref( + domain_id=self.domain_D['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=4, + ) self.post( '/limits', body={'limits': [ref_C, ref_D]}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_update_child_limit(self): # Success to update C to 9 # A,10 A,10 # / \ --> / \ # B,6 C,7 B,6 C,9 - ref_A = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=10) - ref_B = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=6) - ref_C = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=7) + ref_A = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=10, + ) + ref_B = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=6, + ) + ref_C = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=7, + ) self.post( '/limits', body={'limits': [ref_A, ref_B]}, token=self.system_admin_token, - 
expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) r = self.post( '/limits', body={'limits': [ref_C]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) update_dict = {'resource_limit': 9} self.patch( '/limits/%s' % r.result['limits'][0]['id'], body={'limit': update_dict}, token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) def test_update_child_limit_break_hierarchical_tree(self): # Fail to update C to 11 # A,10 A,10 # / \ -/-> / \ # B,6 C,7 B,6 C,11 - ref_A = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=10) - ref_B = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=6) - ref_C = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=7) + ref_A = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=10, + ) + ref_B = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=6, + ) + ref_C = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=7, + ) self.post( '/limits', body={'limits': [ref_A, ref_B]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) r = self.post( '/limits', body={'limits': [ref_C]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) update_dict = {'resource_limit': 11} self.patch( 
'/limits/%s' % r.result['limits'][0]['id'], body={'limit': update_dict}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_update_child_limit_with_default_parent(self): # If A is not set, the default value is 10 (from registered limit). @@ -1551,103 +1913,126 @@ class StrictTwoLevelLimitsTestCase(LimitsTestCase): # A,(10) A,(10) # / \ -/-> / \ # B, C,7 B C,11 - ref_C = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=7) + ref_C = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=7, + ) r = self.post( '/limits', body={'limits': [ref_C]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) update_dict = {'resource_limit': 9} self.patch( '/limits/%s' % r.result['limits'][0]['id'], body={'limit': update_dict}, token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) update_dict = {'resource_limit': 11} self.patch( '/limits/%s' % r.result['limits'][0]['id'], body={'limit': update_dict}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_update_parent_limit(self): # Success to update A to 8 # A,10 A,8 # / \ --> / \ # B,6 C,7 B,6 C,7 - ref_A = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=10) - ref_B = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=6) - ref_C = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - 
resource_limit=7) + ref_A = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=10, + ) + ref_B = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=6, + ) + ref_C = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=7, + ) r = self.post( '/limits', body={'limits': [ref_A]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) self.post( '/limits', body={'limits': [ref_B, ref_C]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) update_dict = {'resource_limit': 8} self.patch( '/limits/%s' % r.result['limits'][0]['id'], body={'limit': update_dict}, token=self.system_admin_token, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) def test_update_parent_limit_break_hierarchical_tree(self): # Fail to update A to 6 # A,10 A,6 # / \ -/-> / \ # B,6 C,7 B,6 C,7 - ref_A = unit.new_limit_ref(domain_id=self.domain_A['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=10) - ref_B = unit.new_limit_ref(project_id=self.project_B['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=6) - ref_C = unit.new_limit_ref(project_id=self.project_C['id'], - service_id=self.service_id, - region_id=self.region_id, - resource_name='volume', - resource_limit=7) + ref_A = unit.new_limit_ref( + domain_id=self.domain_A['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=10, + ) + ref_B = unit.new_limit_ref( + project_id=self.project_B['id'], + service_id=self.service_id, + 
region_id=self.region_id, + resource_name='volume', + resource_limit=6, + ) + ref_C = unit.new_limit_ref( + project_id=self.project_C['id'], + service_id=self.service_id, + region_id=self.region_id, + resource_name='volume', + resource_limit=7, + ) r = self.post( '/limits', body={'limits': [ref_A]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) self.post( '/limits', body={'limits': [ref_B, ref_C]}, token=self.system_admin_token, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) update_dict = {'resource_limit': 6} self.patch( '/limits/%s' % r.result['limits'][0]['id'], body={'limit': update_dict}, token=self.system_admin_token, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) diff --git a/keystone/tests/unit/test_middleware.py b/keystone/tests/unit/test_middleware.py index 1c096564dd..1c06357937 100644 --- a/keystone/tests/unit/test_middleware.py +++ b/keystone/tests/unit/test_middleware.py @@ -45,22 +45,26 @@ class MiddlewareRequestTestBase(unit.TestCase): def _application(self): """A base wsgi application that returns a simple response.""" + def app(environ, start_response): # WSGI requires the body of the response to be bytes body = uuid.uuid4().hex.encode('utf-8') - resp_headers = [('Content-Type', 'text/html; charset=utf8'), - ('Content-Length', str(len(body)))] + resp_headers = [ + ('Content-Type', 'text/html; charset=utf8'), + ('Content-Length', str(len(body))), + ] start_response('200 OK', resp_headers) return [body] return app - def _generate_app_response(self, app, headers=None, method='get', - path='/', **kwargs): + def _generate_app_response( + self, app, headers=None, method='get', path='/', **kwargs + ): """Given a wsgi application wrap it in webtest and call it.""" - return getattr(webtest.TestApp(app), method)(path, - headers=headers or {}, - **kwargs) + return getattr(webtest.TestApp(app), method)( + path, 
headers=headers or {}, **kwargs + ) def _middleware_failure(self, exc, *args, **kwargs): """Assert that an exception is being thrown from process_request.""" @@ -80,9 +84,12 @@ class MiddlewareRequestTestBase(unit.TestCase): def fill_context(i_self, *i_args, **i_kwargs): # i_ to distinguish it from and not clobber the outer vars - e = self.assertRaises(exc, - super(_Failing, i_self).fill_context, - *i_args, **i_kwargs) + e = self.assertRaises( + exc, + super(_Failing, i_self).fill_context, + *i_args, + **i_kwargs + ) i_self._called = True raise e @@ -105,8 +112,9 @@ class MiddlewareRequestTestBase(unit.TestCase): return self._do_middleware_response(*args, **kwargs).request -class AuthContextMiddlewareTest(test_backend_sql.SqlTests, - MiddlewareRequestTestBase): +class AuthContextMiddlewareTest( + test_backend_sql.SqlTests, MiddlewareRequestTestBase +): MIDDLEWARE_CLASS = auth_context.AuthContextMiddleware @@ -115,8 +123,9 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, self.client_issuer = uuid.uuid4().hex self.untrusted_client_issuer = uuid.uuid4().hex self.trusted_issuer = self.client_issuer - self.config_fixture.config(group='tokenless_auth', - trusted_issuer=[self.trusted_issuer]) + self.config_fixture.config( + group='tokenless_auth', trusted_issuer=[self.trusted_issuer] + ) # client_issuer is encoded because you can't hash # unicode objects with hashlib. @@ -142,16 +151,15 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, PROVIDERS.resource_api.create_project(self.project_id, self.project) # 3) Create a user in new domain. 
- self.user = unit.new_user_ref(domain_id=self.domain_id, - project_id=self.project_id) + self.user = unit.new_user_ref( + domain_id=self.domain_id, project_id=self.project_id + ) self.user = PROVIDERS.identity_api.create_user(self.user) # Add IDP self.idp = self._idp_ref(id=self.idp_id) - PROVIDERS.federation_api.create_idp( - self.idp['id'], self.idp - ) + PROVIDERS.federation_api.create_idp(self.idp['id'], self.idp) # Add a role self.role = unit.new_role_ref() @@ -167,13 +175,15 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, PROVIDERS.assignment_api.add_role_to_user_and_project( user_id=self.user['id'], project_id=self.project_id, - role_id=self.role_id) + role_id=self.role_id, + ) # Assign a role to the group on a project PROVIDERS.assignment_api.create_grant( role_id=self.role_id, group_id=self.group['id'], - project_id=self.project_id) + project_id=self.project_id, + ) def _load_mapping_rules(self, rules): # Add a mapping @@ -192,14 +202,14 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, idp = { 'id': id or uuid.uuid4().hex, 'enabled': True, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } return idp def _proto_ref(self, mapping_id=None): proto = { 'id': uuid.uuid4().hex, - 'mapping_id': mapping_id or uuid.uuid4().hex + 'mapping_id': mapping_id or uuid.uuid4().hex, } return proto @@ -211,7 +221,7 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, return { 'id': uuid.uuid4().hex, 'rules': mapped_rules, - 'schema_version': "1.0" + 'schema_version': "1.0", } def _assert_tokenless_auth_context(self, context, ephemeral_user=False): @@ -220,15 +230,18 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, self.assertIn(self.role_name, context['roles']) if ephemeral_user: self.assertEqual(self.group['id'], context['group_ids'][0]) - self.assertEqual('ephemeral', - context[federation_constants.PROTOCOL]) - self.assertEqual(self.idp_id, - context[federation_constants.IDENTITY_PROVIDER]) + 
self.assertEqual( + 'ephemeral', context[federation_constants.PROTOCOL] + ) + self.assertEqual( + self.idp_id, context[federation_constants.IDENTITY_PROVIDER] + ) else: self.assertEqual(self.user['id'], context['user_id']) - def _assert_tokenless_request_context(self, request_context, - ephemeral_user=False): + def _assert_tokenless_request_context( + self, request_context, ephemeral_user=False + ): self.assertIsNotNone(request_context) self.assertEqual(self.project_id, request_context.project_id) self.assertIn(self.role_name, request_context.roles) @@ -239,8 +252,9 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, stub_value = uuid.uuid4().hex env = {authorization.AUTH_CONTEXT_ENV: stub_value} req = self._do_middleware_request(extra_environ=env) - self.assertEqual(stub_value, - req.environ.get(authorization.AUTH_CONTEXT_ENV)) + self.assertEqual( + stub_value, req.environ.get(authorization.AUTH_CONTEXT_ENV) + ) def test_not_applicable_to_token_request(self): req = self._do_middleware_request(path='/auth/tokens', method='post') @@ -265,40 +279,39 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, # references to issuer of the client certificate. 
env['SSL_CLIENT_I_DN'] = self.client_issuer env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) + self._middleware_failure( + exception.ValidationError, extra_environ=env, status=400 + ) def test_has_only_issuer_and_project_domain_name_request(self): env = {} env['SSL_CLIENT_I_DN'] = self.client_issuer env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) + self._middleware_failure( + exception.ValidationError, extra_environ=env, status=400 + ) def test_has_only_issuer_and_project_domain_id_request(self): env = {} env['SSL_CLIENT_I_DN'] = self.client_issuer env['HTTP_X_PROJECT_DOMAIN_ID'] = uuid.uuid4().hex - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) + self._middleware_failure( + exception.ValidationError, extra_environ=env, status=400 + ) def test_missing_both_domain_and_project_request(self): env = {} env['SSL_CLIENT_I_DN'] = self.client_issuer - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) + self._middleware_failure( + exception.ValidationError, extra_environ=env, status=400 + ) def test_empty_trusted_issuer_list(self): env = {} env['SSL_CLIENT_I_DN'] = self.client_issuer env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex - self.config_fixture.config(group='tokenless_auth', - trusted_issuer=[]) + self.config_fixture.config(group='tokenless_auth', trusted_issuer=[]) req = self._do_middleware_request(extra_environ=env) context = req.environ.get(authorization.AUTH_CONTEXT_ENV) @@ -324,7 +337,8 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME + ) req = self._do_middleware_request(extra_environ=env) context = 
req.environ.get(authorization.AUTH_CONTEXT_ENV) @@ -340,7 +354,8 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME + ) req = self._do_middleware_request(extra_environ=env) context = req.environ.get(authorization.AUTH_CONTEXT_ENV) @@ -357,7 +372,8 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME + ) req = self._do_middleware_request(extra_environ=env) context = req.environ.get(authorization.AUTH_CONTEXT_ENV) @@ -374,7 +390,8 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME + ) req = self._do_middleware_request(extra_environ=env) context = req.environ.get(authorization.AUTH_CONTEXT_ENV) @@ -390,11 +407,12 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME + ) - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) + self._middleware_failure( + exception.ValidationError, extra_environ=env, status=400 + ) def test_mapping_with_userid_and_domainid_success(self): env = {} @@ -405,7 +423,8 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINID) + mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINID + ) req = 
self._do_middleware_request(extra_environ=env) context = req.environ.get(authorization.AUTH_CONTEXT_ENV) @@ -422,7 +441,8 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINNAME) + mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINNAME + ) req = self._do_middleware_request(extra_environ=env) context = req.environ.get(authorization.AUTH_CONTEXT_ENV) @@ -439,7 +459,8 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID + ) req = self._do_middleware_request(extra_environ=env) context = req.environ.get(authorization.AUTH_CONTEXT_ENV) @@ -454,12 +475,11 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_DOMAINNAME_ONLY) + self._load_mapping_rules(mapping_fixtures.MAPPING_WITH_DOMAINNAME_ONLY) - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) + self._middleware_failure( + exception.ValidationError, extra_environ=env, status=400 + ) def test_only_domain_id_fail(self): env = {} @@ -468,12 +488,11 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_DOMAINID_ONLY) + self._load_mapping_rules(mapping_fixtures.MAPPING_WITH_DOMAINID_ONLY) - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) + self._middleware_failure( + exception.ValidationError, extra_environ=env, status=400 + ) def test_missing_domain_data_fail(self): env = {} @@ -482,12 +501,11 @@ class 
AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_ONLY) + self._load_mapping_rules(mapping_fixtures.MAPPING_WITH_USERNAME_ONLY) - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) + self._middleware_failure( + exception.ValidationError, extra_environ=env, status=400 + ) def test_userid_success(self): env = {} @@ -513,13 +531,15 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, self.domain['enabled'] = False self.domain = PROVIDERS.resource_api.update_domain( - self.domain['id'], self.domain) + self.domain['id'], self.domain + ) self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID) - self._middleware_failure(exception.Unauthorized, - extra_environ=env, - status=401) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID + ) + self._middleware_failure( + exception.Unauthorized, extra_environ=env, status=401 + ) def test_user_disable_fail(self): env = {} @@ -535,10 +555,10 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, ) self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID + ) - self._middleware_failure(AssertionError, - extra_environ=env) + self._middleware_failure(AssertionError, extra_environ=env) def test_invalid_user_fail(self): env = {} @@ -549,11 +569,12 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) + mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME + ) - self._middleware_failure(exception.UserNotFound, - extra_environ=env, - status=404) + self._middleware_failure( + exception.UserNotFound, extra_environ=env, status=404 + ) def test_ephemeral_success(self): env = {} @@ -561,8 
+582,9 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_NAME'] = self.project_name env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') + self.config_fixture.config( + group='tokenless_auth', protocol='ephemeral' + ) self.protocol_id = 'ephemeral' mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) mapping['rules'][0]['local'][0]['group']['id'] = self.group['id'] @@ -572,8 +594,9 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, context = req.environ.get(authorization.AUTH_CONTEXT_ENV) self._assert_tokenless_auth_context(context, ephemeral_user=True) request_context = req.environ.get(keystone_context.REQUEST_CONTEXT_ENV) - self._assert_tokenless_request_context(request_context, - ephemeral_user=True) + self._assert_tokenless_request_context( + request_context, ephemeral_user=True + ) def test_ephemeral_and_group_domain_name_mapping_success(self): env = {} @@ -581,14 +604,17 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_NAME'] = self.project_name env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') + self.config_fixture.config( + group='tokenless_auth', protocol='ephemeral' + ) self.protocol_id = 'ephemeral' mapping = copy.deepcopy( - mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER_AND_GROUP_DOMAIN_NAME) + mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER_AND_GROUP_DOMAIN_NAME + ) mapping['rules'][0]['local'][0]['group']['name'] = self.group['name'] - mapping['rules'][0]['local'][0]['group']['domain']['name'] = \ + mapping['rules'][0]['local'][0]['group']['domain']['name'] = ( self.domain['name'] + ) self._load_mapping_rules(mapping) req = self._do_middleware_request(extra_environ=env) @@ -601,14 +627,16 @@ class 
AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_NAME'] = self.project_name env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') + self.config_fixture.config( + group='tokenless_auth', protocol='ephemeral' + ) self.protocol_id = 'ephemeral' # this mapping does not have the user type defined # and it should defaults to 'ephemeral' which is # the expected type for the test case. mapping = copy.deepcopy( - mapping_fixtures.MAPPING_FOR_DEFAULT_EPHEMERAL_USER) + mapping_fixtures.MAPPING_FOR_DEFAULT_EPHEMERAL_USER + ) mapping['rules'][0]['local'][0]['group']['id'] = self.group['id'] self._load_mapping_rules(mapping) @@ -616,8 +644,9 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, context = req.environ.get(authorization.AUTH_CONTEXT_ENV) self._assert_tokenless_auth_context(context, ephemeral_user=True) request_context = req.environ.get(keystone_context.REQUEST_CONTEXT_ENV) - self._assert_tokenless_request_context(request_context, - ephemeral_user=True) + self._assert_tokenless_request_context( + request_context, ephemeral_user=True + ) def test_ephemeral_any_user_success(self): """Verify ephemeral user does not need a specified user. 
@@ -629,8 +658,9 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_NAME'] = self.project_name env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') + self.config_fixture.config( + group='tokenless_auth', protocol='ephemeral' + ) self.protocol_id = 'ephemeral' mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) mapping['rules'][0]['local'][0]['group']['id'] = self.group['id'] @@ -640,8 +670,9 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, context = req.environ.get(authorization.AUTH_CONTEXT_ENV) self._assert_tokenless_auth_context(context, ephemeral_user=True) request_context = req.environ.get(keystone_context.REQUEST_CONTEXT_ENV) - self._assert_tokenless_request_context(request_context, - ephemeral_user=True) + self._assert_tokenless_request_context( + request_context, ephemeral_user=True + ) def test_ephemeral_invalid_scope_fail(self): env = {} @@ -649,16 +680,17 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') + self.config_fixture.config( + group='tokenless_auth', protocol='ephemeral' + ) self.protocol_id = 'ephemeral' mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) mapping['rules'][0]['local'][0]['group']['id'] = self.group['id'] self._load_mapping_rules(mapping) - self._middleware_failure(exception.Unauthorized, - extra_environ=env, - status=401) + self._middleware_failure( + exception.Unauthorized, extra_environ=env, status=401 + ) def test_ephemeral_no_group_found_fail(self): env = {} @@ -666,15 +698,17 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_NAME'] = self.project_name 
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') + self.config_fixture.config( + group='tokenless_auth', protocol='ephemeral' + ) self.protocol_id = 'ephemeral' mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex self._load_mapping_rules(mapping) - self._middleware_failure(exception.MappedGroupNotFound, - extra_environ=env) + self._middleware_failure( + exception.MappedGroupNotFound, extra_environ=env + ) def test_ephemeral_incorrect_mapping_fail(self): """Test ephemeral user picking up the non-ephemeral user mapping. @@ -688,15 +722,15 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name env['SSL_CLIENT_USER_NAME'] = self.user['name'] # This will pick up the incorrect mapping - self.config_fixture.config(group='tokenless_auth', - protocol='x509') + self.config_fixture.config(group='tokenless_auth', protocol='x509') self.protocol_id = 'x509' mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex self._load_mapping_rules(mapping) - self._middleware_failure(exception.MappedGroupNotFound, - extra_environ=env) + self._middleware_failure( + exception.MappedGroupNotFound, extra_environ=env + ) def test_create_idp_id_success(self): env = {} @@ -709,14 +743,17 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, env = {} env[uuid.uuid4().hex] = self.client_issuer auth = tokenless_auth.TokenlessAuthHelper(env) - expected_msg = ('Could not determine Identity Provider ID. The ' - 'configuration option %s was not found in the ' - 'request environment.' % - CONF.tokenless_auth.issuer_attribute) + expected_msg = ( + 'Could not determine Identity Provider ID. 
The ' + 'configuration option %s was not found in the ' + 'request environment.' % CONF.tokenless_auth.issuer_attribute + ) # Check the content of the exception message as well - self.assertRaisesRegex(exception.TokenlessAuthConfigError, - expected_msg, - auth._build_idp_id) + self.assertRaisesRegex( + exception.TokenlessAuthConfigError, + expected_msg, + auth._build_idp_id, + ) def test_admin_token_context(self): self.config_fixture.config(admin_token='ADMIN') @@ -727,8 +764,7 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, self.assertNotIn('Invalid user token', log_fix.output) def test_request_non_admin(self): - self.config_fixture.config( - admin_token='ADMIN') + self.config_fixture.config(admin_token='ADMIN') log_fix = self.useFixture(fixtures.FakeLogger()) headers = {authorization.AUTH_TOKEN_HEADER: 'NOT-ADMIN'} self._do_middleware_request(headers=headers) @@ -741,15 +777,15 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests, user_id=self.user['id'], methods=['password'] ) token = PROVIDERS.token_provider_api.issue_token( - context['user_id'], context['methods'], project_id=self.project_id, - auth_context=context + context['user_id'], + context['methods'], + project_id=self.project_id, + auth_context=context, ) - headers = { - authorization.AUTH_TOKEN_HEADER: token.id.encode('utf-8') - } - with mock.patch.object(PROVIDERS.token_provider_api, - 'validate_token', - return_value=token) as token_mock: + headers = {authorization.AUTH_TOKEN_HEADER: token.id.encode('utf-8')} + with mock.patch.object( + PROVIDERS.token_provider_api, 'validate_token', return_value=token + ) as token_mock: self._do_middleware_request( path='/v3/projects', method='get', headers=headers ) diff --git a/keystone/tests/unit/test_policy.py b/keystone/tests/unit/test_policy.py index d0feec6397..5961fc7be1 100644 --- a/keystone/tests/unit/test_policy.py +++ b/keystone/tests/unit/test_policy.py @@ -56,8 +56,13 @@ class PolicyFileTestCase(unit.TestCase): with 
open(self.tmpfilename, "w") as policyfile: policyfile.write("""{"example:test": ["false:false"]}""") policy._ENFORCER._enforcer.clear() - self.assertRaises(exception.ForbiddenAction, policy.enforce, - empty_credentials, action, self.target) + self.assertRaises( + exception.ForbiddenAction, + policy.enforce, + empty_credentials, + action, + self.target, + ) class PolicyTestCase(unit.TestCase): @@ -68,8 +73,10 @@ class PolicyTestCase(unit.TestCase): "example:allowed": [], "example:denied": [["false:false"]], "example:get_http": [["http:http://www.example.com"]], - "example:my_file": [["role:compute_admin"], - ["project_id:%(project_id)s"]], + "example:my_file": [ + ["role:compute_admin"], + ["project_id:%(project_id)s"], + ], "example:early_and_fail": [["false:false", "rule:true"]], "example:early_or_success": [["rule:true"], ["false:false"]], "example:lowercase_admin": [["role:admin"], ["role:sysadmin"]], @@ -87,13 +94,23 @@ class PolicyTestCase(unit.TestCase): def test_enforce_nonexistent_action_throws(self): action = "example:noexist" - self.assertRaises(exception.ForbiddenAction, policy.enforce, - self.credentials, action, self.target) + self.assertRaises( + exception.ForbiddenAction, + policy.enforce, + self.credentials, + action, + self.target, + ) def test_enforce_bad_action_throws(self): action = "example:denied" - self.assertRaises(exception.ForbiddenAction, policy.enforce, - self.credentials, action, self.target) + self.assertRaises( + exception.ForbiddenAction, + policy.enforce, + self.credentials, + action, + self.target, + ) def test_enforce_good_action(self): action = "example:allowed" @@ -105,13 +122,23 @@ class PolicyTestCase(unit.TestCase): credentials = {'project_id': 'fake', 'roles': []} action = "example:my_file" policy.enforce(credentials, action, target_mine) - self.assertRaises(exception.ForbiddenAction, policy.enforce, - credentials, action, target_not_mine) + self.assertRaises( + exception.ForbiddenAction, + policy.enforce, + credentials, + 
action, + target_not_mine, + ) def test_early_AND_enforcement(self): action = "example:early_and_fail" - self.assertRaises(exception.ForbiddenAction, policy.enforce, - self.credentials, action, self.target) + self.assertRaises( + exception.ForbiddenAction, + policy.enforce, + self.credentials, + action, + self.target, + ) def test_early_OR_enforcement(self): action = "example:early_or_success" @@ -132,9 +159,7 @@ class PolicyScopeTypesEnforcementTestCase(unit.TestCase): def setUp(self): super(PolicyScopeTypesEnforcementTestCase, self).setUp() rule = common_policy.RuleDefault( - name='foo', - check_str='', - scope_types=['system'] + name='foo', check_str='', scope_types=['system'] ) policy._ENFORCER._enforcer.register_default(rule) self.credentials = {} @@ -144,8 +169,11 @@ class PolicyScopeTypesEnforcementTestCase(unit.TestCase): def test_forbidden_is_raised_if_enforce_scope_is_true(self): self.config_fixture.config(group='oslo_policy', enforce_scope=True) self.assertRaises( - exception.ForbiddenAction, policy.enforce, self.credentials, - self.action, self.target + exception.ForbiddenAction, + policy.enforce, + self.credentials, + self.action, + self.target, ) def test_warning_message_is_logged_if_enforce_scope_is_false(self): @@ -176,22 +204,31 @@ class PolicyJsonTestCase(unit.TestCase): def test_policies_loads(self): action = 'identity:list_projects' - target = {'user_id': uuid.uuid4().hex, - 'user.domain_id': uuid.uuid4().hex, - 'group.domain_id': uuid.uuid4().hex, - 'project.domain_id': uuid.uuid4().hex, - 'project_id': uuid.uuid4().hex, - 'domain_id': uuid.uuid4().hex} - credentials = {'username': uuid.uuid4().hex, 'token': uuid.uuid4().hex, - 'project_name': None, 'user_id': uuid.uuid4().hex, - 'roles': [u'admin'], 'is_admin': True, - 'is_admin_project': True, 'project_id': None, - 'domain_id': uuid.uuid4().hex} + target = { + 'user_id': uuid.uuid4().hex, + 'user.domain_id': uuid.uuid4().hex, + 'group.domain_id': uuid.uuid4().hex, + 'project.domain_id': 
uuid.uuid4().hex, + 'project_id': uuid.uuid4().hex, + 'domain_id': uuid.uuid4().hex, + } + credentials = { + 'username': uuid.uuid4().hex, + 'token': uuid.uuid4().hex, + 'project_name': None, + 'user_id': uuid.uuid4().hex, + 'roles': [u'admin'], + 'is_admin': True, + 'is_admin_project': True, + 'project_id': None, + 'domain_id': uuid.uuid4().hex, + } # The enforcer is setup behind the scenes and registers the in code # default policies. - result = policy._ENFORCER._enforcer.enforce(action, target, - credentials) + result = policy._ENFORCER._enforcer.enforce( + action, target, credentials + ) self.assertTrue(result) def test_all_targets_documented(self): @@ -199,17 +236,27 @@ class PolicyJsonTestCase(unit.TestCase): # These keys are in the policy.yaml but aren't targets. policy_rule_keys = [ - 'admin_or_owner', 'admin_or_token_subject', 'admin_required', - 'owner', 'service_admin_or_token_subject', 'service_or_admin', - 'service_role', 'token_subject', ] + 'admin_or_owner', + 'admin_or_token_subject', + 'admin_required', + 'owner', + 'service_admin_or_token_subject', + 'service_or_admin', + 'service_role', + 'token_subject', + ] def read_doc_targets(): # Parse the doc/source/policy_mapping.rst file and return the # targets. 
doc_path = os.path.join( - unit.ROOTDIR, 'doc', 'source', 'getting-started', - 'policy_mapping.rst') + unit.ROOTDIR, + 'doc', + 'source', + 'getting-started', + 'policy_mapping.rst', + ) with open(doc_path) as doc_file: for line in doc_file: if line.startswith('Target'): @@ -240,7 +287,7 @@ class GeneratePolicyFileTestCase(unit.TestCase): ret_val = subprocess.Popen( ['oslopolicy-policy-generator', '--namespace', 'keystone'], stdout=subprocess.PIPE, - stderr=subprocess.PIPE + stderr=subprocess.PIPE, ) output = ret_val.communicate() self.assertEqual(ret_val.returncode, 0, output) diff --git a/keystone/tests/unit/test_receipt_provider.py b/keystone/tests/unit/test_receipt_provider.py index c304687bbe..3243cdf1d5 100644 --- a/keystone/tests/unit/test_receipt_provider.py +++ b/keystone/tests/unit/test_receipt_provider.py @@ -44,16 +44,14 @@ class TestReceiptProvider(unit.TestCase): ksfixtures.KeyRepository( self.config_fixture, 'fernet_receipts', - CONF.fernet_receipts.max_active_keys + CONF.fernet_receipts.max_active_keys, ) ) self.load_backends() def test_unsupported_receipt_provider(self): - self.config_fixture.config(group='receipt', - provider='MyProvider') - self.assertRaises(ImportError, - receipt.provider.Manager) + self.config_fixture.config(group='receipt', provider='MyProvider') + self.assertRaises(ImportError, receipt.provider.Manager) def test_provider_receipt_expiration_validation(self): receipt = receipt_model.ReceiptModel() @@ -61,9 +59,11 @@ class TestReceiptProvider(unit.TestCase): receipt.expires_at = utils.isotime(CURRENT_DATE - DELTA) receipt.id = uuid.uuid4().hex with freezegun.freeze_time(CURRENT_DATE): - self.assertRaises(exception.ReceiptNotFound, - PROVIDERS.receipt_provider_api._is_valid_receipt, - receipt) + self.assertRaises( + exception.ReceiptNotFound, + PROVIDERS.receipt_provider_api._is_valid_receipt, + receipt, + ) # confirm a non-expired receipt doesn't throw errors. # returning None, rather than throwing an error is correct. 
@@ -73,10 +73,12 @@ class TestReceiptProvider(unit.TestCase): receipt.id = uuid.uuid4().hex with freezegun.freeze_time(CURRENT_DATE): self.assertIsNone( - PROVIDERS.receipt_provider_api._is_valid_receipt(receipt)) + PROVIDERS.receipt_provider_api._is_valid_receipt(receipt) + ) def test_validate_v3_none_receipt_raises_receipt_not_found(self): self.assertRaises( exception.ReceiptNotFound, PROVIDERS.receipt_provider_api.validate_receipt, - None) + None, + ) diff --git a/keystone/tests/unit/test_revoke.py b/keystone/tests/unit/test_revoke.py index 82391ae76b..c60dce225d 100644 --- a/keystone/tests/unit/test_revoke.py +++ b/keystone/tests/unit/test_revoke.py @@ -50,9 +50,11 @@ def _sample_blank_token(): class RevokeTests(object): def _assertTokenRevoked(self, token_data): - self.assertRaises(exception.TokenNotFound, - PROVIDERS.revoke_api.check_token, - token=token_data) + self.assertRaises( + exception.TokenNotFound, + PROVIDERS.revoke_api.check_token, + token=token_data, + ) def _assertTokenNotRevoked(self, token_data): self.assertIsNone(PROVIDERS.revoke_api.check_token(token_data)) @@ -134,11 +136,13 @@ class RevokeTests(object): # the token when passed in. 
first_token = _sample_blank_token() first_token['project_id'] = uuid.uuid4().hex - revocation_backend.revoke(revoke_model.RevokeEvent( - project_id=first_token['project_id'])) + revocation_backend.revoke( + revoke_model.RevokeEvent(project_id=first_token['project_id']) + ) self._assertTokenRevoked(first_token) - self.assertEqual(1, len(revocation_backend.list_events( - token=first_token))) + self.assertEqual( + 1, len(revocation_backend.list_events(token=first_token)) + ) # Create a second token, revoke it, check the token has been revoked, # and check the list to make sure that even though we now have 2 @@ -146,11 +150,13 @@ class RevokeTests(object): # only one match for our second_token should exist second_token = _sample_blank_token() second_token['project_id'] = uuid.uuid4().hex - revocation_backend.revoke(revoke_model.RevokeEvent( - project_id=second_token['project_id'])) + revocation_backend.revoke( + revoke_model.RevokeEvent(project_id=second_token['project_id']) + ) self._assertTokenRevoked(second_token) self.assertEqual( - 1, len(revocation_backend.list_events(token=second_token))) + 1, len(revocation_backend.list_events(token=second_token)) + ) # This gets a token but overrides project_id of the token to be None. 
# We expect that since there are two events which both have populated @@ -170,10 +176,12 @@ class RevokeTests(object): first_token = _sample_blank_token() first_token['audit_id'] = provider.random_urlsafe_str() PROVIDERS.revoke_api.revoke_by_audit_id( - audit_id=first_token['audit_id']) + audit_id=first_token['audit_id'] + ) self._assertTokenRevoked(first_token) self.assertEqual( - 1, len(revocation_backend.list_events(token=first_token))) + 1, len(revocation_backend.list_events(token=first_token)) + ) # Create a second token, revoke it, check it is revoked, check to make # sure that list events only finds 1 match since there are 2 and they @@ -181,10 +189,12 @@ class RevokeTests(object): second_token = _sample_blank_token() second_token['audit_id'] = provider.random_urlsafe_str() PROVIDERS.revoke_api.revoke_by_audit_id( - audit_id=second_token['audit_id']) + audit_id=second_token['audit_id'] + ) self._assertTokenRevoked(second_token) self.assertEqual( - 1, len(revocation_backend.list_events(token=second_token))) + 1, len(revocation_backend.list_events(token=second_token)) + ) # Create a third token with audit_id set to None to make sure that # since there are no events currently revoked with audit_id None this @@ -193,7 +203,8 @@ class RevokeTests(object): third_token['audit_id'] = None self._assertTokenNotRevoked(third_token) self.assertEqual( - 0, len(revocation_backend.list_events(token=third_token))) + 0, len(revocation_backend.list_events(token=third_token)) + ) def test_list_revoked_since(self): revocation_backend = sql.Revoke() @@ -215,26 +226,32 @@ class RevokeTests(object): first_token['audit_id'] = provider.random_urlsafe_str() # revoke event and then verify that there is only one revocation # and verify the only revoked event is the token - PROVIDERS.revoke_api.revoke(revoke_model.RevokeEvent( - user_id=first_token['user_id'], - project_id=first_token['project_id'], - audit_id=first_token['audit_id'])) + PROVIDERS.revoke_api.revoke( + 
revoke_model.RevokeEvent( + user_id=first_token['user_id'], + project_id=first_token['project_id'], + audit_id=first_token['audit_id'], + ) + ) self._assertTokenRevoked(first_token) self.assertEqual( - 1, len(revocation_backend.list_events(token=first_token))) + 1, len(revocation_backend.list_events(token=first_token)) + ) # If a token has None values which the event contains it shouldn't # match and not be revoked second_token = _sample_blank_token() self._assertTokenNotRevoked(second_token) self.assertEqual( - 0, len(revocation_backend.list_events(token=second_token))) + 0, len(revocation_backend.list_events(token=second_token)) + ) # If an event column and corresponding dict value don't match, Then # it should not add the event in the list. Demonstrate for project third_token = _sample_blank_token() third_token['project_id'] = uuid.uuid4().hex self._assertTokenNotRevoked(third_token) self.assertEqual( - 0, len(revocation_backend.list_events(token=third_token))) + 0, len(revocation_backend.list_events(token=third_token)) + ) # A revoked event with user_id as null and token user_id non null # should still be return an event and be revoked if other non null # event fields match non null token fields @@ -242,12 +259,16 @@ class RevokeTests(object): fourth_token['user_id'] = uuid.uuid4().hex fourth_token['project_id'] = uuid.uuid4().hex fourth_token['audit_id'] = provider.random_urlsafe_str() - PROVIDERS.revoke_api.revoke(revoke_model.RevokeEvent( - project_id=fourth_token['project_id'], - audit_id=fourth_token['audit_id'])) + PROVIDERS.revoke_api.revoke( + revoke_model.RevokeEvent( + project_id=fourth_token['project_id'], + audit_id=fourth_token['audit_id'], + ) + ) self._assertTokenRevoked(fourth_token) self.assertEqual( - 1, len(revocation_backend.list_events(token=fourth_token))) + 1, len(revocation_backend.list_events(token=fourth_token)) + ) def _user_field_test(self, field_name): token = _sample_blank_token() @@ -279,7 +300,8 @@ class RevokeTests(object): 
self._assertTokenNotRevoked(token_data) self.assertEqual( - 0, len(revocation_backend.list_events(token=token_data))) + 0, len(revocation_backend.list_events(token=token_data)) + ) PROVIDERS.revoke_api.revoke( revoke_model.RevokeEvent(domain_id=domain_id) @@ -287,7 +309,8 @@ class RevokeTests(object): self._assertTokenRevoked(token_data) self.assertEqual( - 1, len(revocation_backend.list_events(token=token_data))) + 1, len(revocation_backend.list_events(token=token_data)) + ) def test_by_domain_project(self): revocation_backend = sql.Revoke() @@ -300,16 +323,21 @@ class RevokeTests(object): self._assertTokenNotRevoked(token_data) self.assertEqual( - 0, len(revocation_backend.list_events(token=token_data))) + 0, len(revocation_backend.list_events(token=token_data)) + ) # If revoke a domain, then a token scoped to a project in the domain # is revoked. - PROVIDERS.revoke_api.revoke(revoke_model.RevokeEvent( - domain_id=token_data['assignment_domain_id'])) + PROVIDERS.revoke_api.revoke( + revoke_model.RevokeEvent( + domain_id=token_data['assignment_domain_id'] + ) + ) self._assertTokenRevoked(token_data) self.assertEqual( - 1, len(revocation_backend.list_events(token=token_data))) + 1, len(revocation_backend.list_events(token=token_data)) + ) def test_by_domain_domain(self): revocation_backend = sql.Revoke() @@ -321,15 +349,20 @@ class RevokeTests(object): self._assertTokenNotRevoked(token_data) self.assertEqual( - 0, len(revocation_backend.list_events(token=token_data))) + 0, len(revocation_backend.list_events(token=token_data)) + ) # If revoke a domain, then a token scoped to the domain is revoked. 
- PROVIDERS.revoke_api.revoke(revoke_model.RevokeEvent( - domain_id=token_data['assignment_domain_id'])) + PROVIDERS.revoke_api.revoke( + revoke_model.RevokeEvent( + domain_id=token_data['assignment_domain_id'] + ) + ) self._assertTokenRevoked(token_data) self.assertEqual( - 1, len(revocation_backend.list_events(token=token_data))) + 1, len(revocation_backend.list_events(token=token_data)) + ) def test_revoke_by_audit_id(self): token = _sample_blank_token() @@ -369,8 +402,7 @@ class RevokeTests(object): def test_expired_events_are_removed(self, mock_utcnow): def _sample_token_values(): token = _sample_blank_token() - token['expires_at'] = utils.isotime(_future_time(), - subsecond=True) + token['expires_at'] = utils.isotime(_future_time(), subsecond=True) return token now = datetime.datetime.utcnow() @@ -384,18 +416,22 @@ class RevokeTests(object): audit_chain_id = uuid.uuid4().hex PROVIDERS.revoke_api.revoke_by_audit_chain_id(audit_chain_id) token_values['audit_chain_id'] = audit_chain_id - self.assertRaises(exception.TokenNotFound, - PROVIDERS.revoke_api.check_token, - token_values) + self.assertRaises( + exception.TokenNotFound, + PROVIDERS.revoke_api.check_token, + token_values, + ) # Move our clock forward by 2h, build a new token and validate it. 
# 'synchronize' should now be exercised and remove old expired events mock_utcnow.return_value = now_plus_2h PROVIDERS.revoke_api.revoke_by_audit_chain_id(audit_chain_id) # two hours later, it should still be not found - self.assertRaises(exception.TokenNotFound, - PROVIDERS.revoke_api.check_token, - token_values) + self.assertRaises( + exception.TokenNotFound, + PROVIDERS.revoke_api.check_token, + token_values, + ) def test_delete_group_without_role_does_not_revoke_users(self): revocation_backend = sql.Revoke() @@ -425,7 +461,8 @@ class RevokeTests(object): user_id=user2['id'], group_id=group1['id'] ) self.assertEqual( - 2, len(PROVIDERS.identity_api.list_users_in_group(group1['id']))) + 2, len(PROVIDERS.identity_api.list_users_in_group(group1['id'])) + ) PROVIDERS.identity_api.delete_group(group1['id']) self.assertEqual(0, len(revocation_backend.list_events())) @@ -447,7 +484,8 @@ class RevokeTests(object): user_id=user2['id'], group_id=group2['id'] ) self.assertEqual( - 2, len(PROVIDERS.identity_api.list_users_in_group(group2['id']))) + 2, len(PROVIDERS.identity_api.list_users_in_group(group2['id'])) + ) PROVIDERS.identity_api.delete_group(group2['id']) self.assertEqual(2, len(revocation_backend.list_events())) @@ -456,13 +494,12 @@ class FernetSqlRevokeTests(test_backend_sql.SqlTests, RevokeTests): def config_overrides(self): super(FernetSqlRevokeTests, self).config_overrides() self.config_fixture.config( - group='token', - provider='fernet', - revoke_by_id=False) + group='token', provider='fernet', revoke_by_id=False + ) self.useFixture( ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) diff --git a/keystone/tests/unit/test_shadow_users.py b/keystone/tests/unit/test_shadow_users.py index 30dc9e7057..a8ed648616 100644 --- a/keystone/tests/unit/test_shadow_users.py +++ b/keystone/tests/unit/test_shadow_users.py @@ -23,19 +23,22 @@ from keystone.tests.unit.ksfixtures 
import database PROVIDERS = provider_api.ProviderAPIs -class ShadowUsersTests(unit.TestCase, - test_backend.ShadowUsersBackendTests, - test_core.ShadowUsersCoreTests): +class ShadowUsersTests( + unit.TestCase, + test_backend.ShadowUsersBackendTests, + test_core.ShadowUsersCoreTests, +): def setUp(self): super(ShadowUsersTests, self).setUp() self.useFixture(database.Database()) self.load_backends() PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) self.idp = { 'id': uuid.uuid4().hex, 'enabled': True, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } self.mapping = { 'id': uuid.uuid4().hex, @@ -43,13 +46,13 @@ class ShadowUsersTests(unit.TestCase, self.protocol = { 'id': uuid.uuid4().hex, 'idp_id': self.idp['id'], - 'mapping_id': self.mapping['id'] + 'mapping_id': self.mapping['id'], } self.federated_user = { 'idp_id': self.idp['id'], 'protocol_id': self.protocol['id'], 'unique_id': uuid.uuid4().hex, - 'display_name': uuid.uuid4().hex + 'display_name': uuid.uuid4().hex, } self.email = uuid.uuid4().hex PROVIDERS.federation_api.create_idp(self.idp['id'], self.idp) @@ -57,9 +60,11 @@ class ShadowUsersTests(unit.TestCase, self.mapping['id'], self.mapping ) PROVIDERS.federation_api.create_protocol( - self.idp['id'], self.protocol['id'], self.protocol) - self.domain_id = ( - PROVIDERS.federation_api.get_idp(self.idp['id'])['domain_id']) + self.idp['id'], self.protocol['id'], self.protocol + ) + self.domain_id = PROVIDERS.federation_api.get_idp(self.idp['id'])[ + 'domain_id' + ] class TestUserWithFederatedUser(ShadowUsersTests): @@ -71,16 +76,20 @@ class TestUserWithFederatedUser(ShadowUsersTests): def assertFederatedDictsEqual(self, fed_dict, fed_object): self.assertEqual(fed_dict['idp_id'], fed_object['idp_id']) - self.assertEqual(fed_dict['protocol_id'], - fed_object['protocols'][0]['protocol_id']) - 
self.assertEqual(fed_dict['unique_id'], - fed_object['protocols'][0]['unique_id']) + self.assertEqual( + fed_dict['protocol_id'], fed_object['protocols'][0]['protocol_id'] + ) + self.assertEqual( + fed_dict['unique_id'], fed_object['protocols'][0]['unique_id'] + ) def test_get_user_when_user_has_federated_object(self): - fed_dict = unit.new_federated_user_ref(idp_id=self.idp['id'], - protocol_id=self.protocol['id']) + fed_dict = unit.new_federated_user_ref( + idp_id=self.idp['id'], protocol_id=self.protocol['id'] + ) user = self.shadow_users_api.create_federated_user( - self.domain_id, fed_dict) + self.domain_id, fed_dict + ) # test that the user returns a federated object and that there is only # one returned @@ -96,23 +105,20 @@ class TestUserWithFederatedUser(ShadowUsersTests): { 'idp_id': 'fakeidp', 'protocols': [ - { - 'protocol_id': 'nonexistent', - 'unique_id': 'unknown' - } - ] + {'protocol_id': 'nonexistent', 'unique_id': 'unknown'} + ], } ] # Check validation works by throwing a federated object with # invalid idp_id, protocol_id inside the user passed to create_user. 
- self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - baduser) + self.assertRaises( + exception.ValidationError, self.identity_api.create_user, baduser + ) baduser['federated'][0]['idp_id'] = self.idp['id'] - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - baduser) + self.assertRaises( + exception.ValidationError, self.identity_api.create_user, baduser + ) def test_create_user_with_federated_attributes(self): # Create the schema of a federated attribute being passed in with a @@ -125,19 +131,21 @@ class TestUserWithFederatedUser(ShadowUsersTests): 'protocols': [ { 'protocol_id': self.protocol['id'], - 'unique_id': unique_id + 'unique_id': unique_id, } - ] + ], } ] # Test that there are no current federated_users that match our users # federated object and create the user - self.assertRaises(exception.UserNotFound, - self.shadow_users_api.get_federated_user, - self.idp['id'], - self.protocol['id'], - unique_id) + self.assertRaises( + exception.UserNotFound, + self.shadow_users_api.get_federated_user, + self.idp['id'], + self.protocol['id'], + unique_id, + ) ref = self.identity_api.create_user(user) @@ -145,9 +153,8 @@ class TestUserWithFederatedUser(ShadowUsersTests): self.assertEqual(user['name'], ref['name']) self.assertEqual(user['federated'], ref['federated']) fed_user = self.shadow_users_api.get_federated_user( - self.idp['id'], - self.protocol['id'], - unique_id) + self.idp['id'], self.protocol['id'], unique_id + ) self.assertIsNotNone(fed_user) def test_update_user_with_invalid_idp_and_protocol_fails(self): @@ -156,34 +163,33 @@ class TestUserWithFederatedUser(ShadowUsersTests): { 'idp_id': 'fakeidp', 'protocols': [ - { - 'protocol_id': 'nonexistent', - 'unique_id': 'unknown' - } - ] + {'protocol_id': 'nonexistent', 'unique_id': 'unknown'} + ], } ] # Check validation works by throwing a federated object with # invalid idp_id, protocol_id inside the user passed to create_user. 
- self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - baduser) + self.assertRaises( + exception.ValidationError, self.identity_api.create_user, baduser + ) baduser['federated'][0]['idp_id'] = self.idp['id'] - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - baduser) + self.assertRaises( + exception.ValidationError, self.identity_api.create_user, baduser + ) def test_update_user_with_federated_attributes(self): user = self.shadow_users_api.create_federated_user( - self.domain_id, self.federated_user) + self.domain_id, self.federated_user + ) user = self.identity_api.get_user(user['id']) # Test that update user can return a federated object with the user as # a response if the user has any user = self.identity_api.update_user(user['id'], user) - self.assertFederatedDictsEqual(self.federated_user, - user['federated'][0]) + self.assertFederatedDictsEqual( + self.federated_user, user['federated'][0] + ) # Test that update user can replace a users federated objects if added # in the request and that its response is that new federated objects @@ -193,9 +199,9 @@ class TestUserWithFederatedUser(ShadowUsersTests): 'protocols': [ { 'protocol_id': self.protocol['id'], - 'unique_id': uuid.uuid4().hex + 'unique_id': uuid.uuid4().hex, } - ] + ], } ] user['federated'] = new_fed diff --git a/keystone/tests/unit/test_sql_banned_operations.py b/keystone/tests/unit/test_sql_banned_operations.py index 17e27c7f42..4dbc803760 100644 --- a/keystone/tests/unit/test_sql_banned_operations.py +++ b/keystone/tests/unit/test_sql_banned_operations.py @@ -72,7 +72,8 @@ class BannedDBSchemaOperations(fixtures.Fixture): for op in self._banned_ops: self.useFixture( fixtures.MonkeyPatch( - 'alembic.op.%s' % op, self._explode(op, self._revision), + 'alembic.op.%s' % op, + self._explode(op, self._revision), ) ) @@ -128,8 +129,9 @@ class KeystoneMigrationsWalk( # Override keystone's context manager to be oslo.db's global context # manager. 
sql.core._TESTING_USE_GLOBAL_CONTEXT_MANAGER = True - self.addCleanup(setattr, - sql.core, '_TESTING_USE_GLOBAL_CONTEXT_MANAGER', False) + self.addCleanup( + setattr, sql.core, '_TESTING_USE_GLOBAL_CONTEXT_MANAGER', False + ) self.addCleanup(sql.cleanup) def _migrate_up(self, connection, revision): @@ -141,9 +143,8 @@ class KeystoneMigrationsWalk( self.assertIsNotNone( getattr(self, '_check_%s' % version, None), - ( - 'DB Migration %s does not have a test; you must add one' - ) % version, + ('DB Migration %s does not have a test; you must add one') + % version, ) pre_upgrade = getattr(self, '_pre_upgrade_%s' % version, None) @@ -194,23 +195,23 @@ class KeystoneMigrationsWalk( def _pre_upgrade_99de3849d860(self, connection): inspector = sqlalchemy.inspect(connection) - for table, constraint in ( - self._99de3849d860_removed_constraints.items() - ): + for ( + table, + constraint, + ) in self._99de3849d860_removed_constraints.items(): constraints = [ - x['name'] for x in - inspector.get_unique_constraints(table) + x['name'] for x in inspector.get_unique_constraints(table) ] self.assertIn(constraint, constraints) def _check_99de3849d860(self, connection): inspector = sqlalchemy.inspect(connection) - for table, constraint in ( - self._99de3849d860_removed_constraints.items() - ): + for ( + table, + constraint, + ) in self._99de3849d860_removed_constraints.items(): constraints = [ - x['name'] for x in - inspector.get_unique_constraints(table) + x['name'] for x in inspector.get_unique_constraints(table) ] self.assertNotIn(constraint, constraints) @@ -226,9 +227,13 @@ class KeystoneMigrationsWalk( for c in constraints: all_constraints + c.get('column_names', []) - not_allowed_constraints = ['trustor_user_id', 'trustee_user_id', - 'project_id', 'impersonation', 'expires_at', - ] + not_allowed_constraints = [ + 'trustor_user_id', + 'trustee_user_id', + 'project_id', + 'impersonation', + 'expires_at', + ] for not_c in not_allowed_constraints: self.assertNotIn(not_c, 
all_constraints) @@ -240,8 +245,7 @@ class KeystoneMigrationsWalk( {x['name'] for x in constraints}, ) constraint = [ - x for x in constraints if x['name'] == - 'duplicate_trust_constraint' + x for x in constraints if x['name'] == 'duplicate_trust_constraint' ][0] self.assertEqual( [ diff --git a/keystone/tests/unit/test_sql_upgrade.py b/keystone/tests/unit/test_sql_upgrade.py index 123e4648ed..9579e56367 100644 --- a/keystone/tests/unit/test_sql_upgrade.py +++ b/keystone/tests/unit/test_sql_upgrade.py @@ -61,164 +61,334 @@ CONF = keystone.conf.CONF # { : [, , ...], ... } INITIAL_TABLE_STRUCTURE = { 'config_register': [ - 'type', 'domain_id', + 'type', + 'domain_id', ], 'credential': [ - 'id', 'user_id', 'project_id', 'type', 'extra', 'key_hash', + 'id', + 'user_id', + 'project_id', + 'type', + 'extra', + 'key_hash', 'encrypted_blob', ], 'endpoint': [ - 'id', 'legacy_endpoint_id', 'interface', 'region_id', 'service_id', - 'url', 'enabled', 'extra', + 'id', + 'legacy_endpoint_id', + 'interface', + 'region_id', + 'service_id', + 'url', + 'enabled', + 'extra', ], 'group': [ - 'id', 'domain_id', 'name', 'description', 'extra', + 'id', + 'domain_id', + 'name', + 'description', + 'extra', ], 'policy': [ - 'id', 'type', 'blob', 'extra', + 'id', + 'type', + 'blob', + 'extra', ], 'project': [ - 'id', 'name', 'extra', 'description', 'enabled', 'domain_id', - 'parent_id', 'is_domain', + 'id', + 'name', + 'extra', + 'description', + 'enabled', + 'domain_id', + 'parent_id', + 'is_domain', ], 'project_option': [ - 'project_id', 'option_id', 'option_value', + 'project_id', + 'option_id', + 'option_value', ], 'project_tag': [ - 'project_id', 'name', + 'project_id', + 'name', ], 'role': [ - 'id', 'name', 'extra', 'domain_id', 'description', + 'id', + 'name', + 'extra', + 'domain_id', + 'description', ], 'role_option': [ - 'role_id', 'option_id', 'option_value', + 'role_id', + 'option_id', + 'option_value', ], 'service': [ - 'id', 'type', 'extra', 'enabled', + 'id', + 'type', + 
'extra', + 'enabled', ], 'token': [ - 'id', 'expires', 'extra', 'valid', 'trust_id', 'user_id', + 'id', + 'expires', + 'extra', + 'valid', + 'trust_id', + 'user_id', ], 'trust': [ - 'id', 'trustor_user_id', 'trustee_user_id', 'project_id', - 'impersonation', 'deleted_at', 'expires_at', 'remaining_uses', 'extra', - 'expires_at_int', 'redelegated_trust_id', 'redelegation_count', + 'id', + 'trustor_user_id', + 'trustee_user_id', + 'project_id', + 'impersonation', + 'deleted_at', + 'expires_at', + 'remaining_uses', + 'extra', + 'expires_at_int', + 'redelegated_trust_id', + 'redelegation_count', ], 'trust_role': [ - 'trust_id', 'role_id', + 'trust_id', + 'role_id', ], 'user': [ - 'id', 'extra', 'enabled', 'default_project_id', 'created_at', - 'last_active_at', 'domain_id', + 'id', + 'extra', + 'enabled', + 'default_project_id', + 'created_at', + 'last_active_at', + 'domain_id', ], 'user_option': [ - 'user_id', 'option_id', 'option_value', + 'user_id', + 'option_id', + 'option_value', ], 'user_group_membership': [ - 'user_id', 'group_id', + 'user_id', + 'group_id', ], 'region': [ - 'id', 'description', 'parent_region_id', 'extra', + 'id', + 'description', + 'parent_region_id', + 'extra', ], 'assignment': [ - 'type', 'actor_id', 'target_id', 'role_id', 'inherited', + 'type', + 'actor_id', + 'target_id', + 'role_id', + 'inherited', ], 'id_mapping': [ - 'public_id', 'domain_id', 'local_id', 'entity_type', + 'public_id', + 'domain_id', + 'local_id', + 'entity_type', ], 'whitelisted_config': [ - 'domain_id', 'group', 'option', 'value', + 'domain_id', + 'group', + 'option', + 'value', ], 'sensitive_config': [ - 'domain_id', 'group', 'option', 'value', + 'domain_id', + 'group', + 'option', + 'value', ], 'policy_association': [ - 'id', 'policy_id', 'endpoint_id', 'service_id', 'region_id', + 'id', + 'policy_id', + 'endpoint_id', + 'service_id', + 'region_id', ], 'identity_provider': [ - 'id', 'enabled', 'description', 'domain_id', 'authorization_ttl', + 'id', + 'enabled', + 
'description', + 'domain_id', + 'authorization_ttl', ], 'federation_protocol': [ - 'id', 'idp_id', 'mapping_id', 'remote_id_attribute', + 'id', + 'idp_id', + 'mapping_id', + 'remote_id_attribute', ], 'mapping': [ - 'id', 'rules', 'schema_version', + 'id', + 'rules', + 'schema_version', ], 'service_provider': [ - 'auth_url', 'id', 'enabled', 'description', 'sp_url', + 'auth_url', + 'id', + 'enabled', + 'description', + 'sp_url', 'relay_state_prefix', ], 'idp_remote_ids': [ - 'idp_id', 'remote_id', + 'idp_id', + 'remote_id', ], 'consumer': [ - 'id', 'description', 'secret', 'extra', + 'id', + 'description', + 'secret', + 'extra', ], 'request_token': [ - 'id', 'request_secret', 'verifier', 'authorizing_user_id', - 'requested_project_id', 'role_ids', 'consumer_id', 'expires_at', + 'id', + 'request_secret', + 'verifier', + 'authorizing_user_id', + 'requested_project_id', + 'role_ids', + 'consumer_id', + 'expires_at', ], 'access_token': [ - 'id', 'access_secret', 'authorizing_user_id', 'project_id', 'role_ids', - 'consumer_id', 'expires_at', + 'id', + 'access_secret', + 'authorizing_user_id', + 'project_id', + 'role_ids', + 'consumer_id', + 'expires_at', ], 'revocation_event': [ - 'id', 'domain_id', 'project_id', 'user_id', 'role_id', 'trust_id', - 'consumer_id', 'access_token_id', 'issued_before', 'expires_at', - 'revoked_at', 'audit_id', 'audit_chain_id', - ], - 'project_endpoint': [ - 'endpoint_id', 'project_id' + 'id', + 'domain_id', + 'project_id', + 'user_id', + 'role_id', + 'trust_id', + 'consumer_id', + 'access_token_id', + 'issued_before', + 'expires_at', + 'revoked_at', + 'audit_id', + 'audit_chain_id', ], + 'project_endpoint': ['endpoint_id', 'project_id'], 'endpoint_group': [ - 'id', 'name', 'description', 'filters', + 'id', + 'name', + 'description', + 'filters', ], 'project_endpoint_group': [ - 'endpoint_group_id', 'project_id', + 'endpoint_group_id', + 'project_id', ], 'implied_role': [ - 'prior_role_id', 'implied_role_id', + 'prior_role_id', + 
'implied_role_id', ], 'local_user': [ - 'id', 'user_id', 'domain_id', 'name', 'failed_auth_count', + 'id', + 'user_id', + 'domain_id', + 'name', + 'failed_auth_count', 'failed_auth_at', ], 'password': [ - 'id', 'local_user_id', 'created_at', 'expires_at', - 'self_service', 'password_hash', 'created_at_int', 'expires_at_int', + 'id', + 'local_user_id', + 'created_at', + 'expires_at', + 'self_service', + 'password_hash', + 'created_at_int', + 'expires_at_int', ], 'federated_user': [ - 'id', 'user_id', 'idp_id', 'protocol_id', 'unique_id', 'display_name', + 'id', + 'user_id', + 'idp_id', + 'protocol_id', + 'unique_id', + 'display_name', ], 'nonlocal_user': [ - 'domain_id', 'name', 'user_id', + 'domain_id', + 'name', + 'user_id', ], 'system_assignment': [ - 'type', 'actor_id', 'target_id', 'role_id', 'inherited', + 'type', + 'actor_id', + 'target_id', + 'role_id', + 'inherited', ], 'registered_limit': [ - 'internal_id', 'id', 'service_id', 'region_id', 'resource_name', - 'default_limit', 'description', + 'internal_id', + 'id', + 'service_id', + 'region_id', + 'resource_name', + 'default_limit', + 'description', ], 'limit': [ - 'internal_id', 'id', 'project_id', 'resource_limit', 'description', - 'registered_limit_id', 'domain_id', + 'internal_id', + 'id', + 'project_id', + 'resource_limit', + 'description', + 'registered_limit_id', + 'domain_id', ], 'application_credential': [ - 'internal_id', 'id', 'name', 'secret_hash', 'description', 'user_id', - 'project_id', 'expires_at', 'system', 'unrestricted', + 'internal_id', + 'id', + 'name', + 'secret_hash', + 'description', + 'user_id', + 'project_id', + 'expires_at', + 'system', + 'unrestricted', ], 'application_credential_role': [ - 'application_credential_id', 'role_id', + 'application_credential_id', + 'role_id', ], 'access_rule': [ - 'id', 'service', 'path', 'method', 'external_id', 'user_id', + 'id', + 'service', + 'path', + 'method', + 'external_id', + 'user_id', ], 'application_credential_access_rule': [ - 
'application_credential_id', 'access_rule_id', + 'application_credential_id', + 'access_rule_id', ], 'expiring_user_group_membership': [ - 'user_id', 'group_id', 'idp_id', 'last_verified', + 'user_id', + 'group_id', + 'idp_id', + 'last_verified', ], } @@ -243,8 +413,9 @@ class MigrateBase( # Override keystone's context manager to be oslo.db's global context # manager. sql.core._TESTING_USE_GLOBAL_CONTEXT_MANAGER = True - self.addCleanup(setattr, - sql.core, '_TESTING_USE_GLOBAL_CONTEXT_MANAGER', False) + self.addCleanup( + setattr, sql.core, '_TESTING_USE_GLOBAL_CONTEXT_MANAGER', False + ) self.addCleanup(sql.cleanup) def expand(self): @@ -262,7 +433,9 @@ class MigrateBase( def load_table(self, name): table = sqlalchemy.Table( - name, self.metadata, autoload_with=self.engine, + name, + self.metadata, + autoload_with=self.engine, ) return table @@ -272,7 +445,9 @@ class MigrateBase( # detect renamed or dropped tables try: sqlalchemy.Table( - table_name, self.metadata, autoload_with=self.engine, + table_name, + self.metadata, + autoload_with=self.engine, ) except sqlalchemy.exc.NoSuchTableError: pass @@ -285,8 +460,9 @@ class MigrateBase( actual_cols = [col.name for col in table.columns] # Check if the columns are equal, but allow for a different order, # which might occur after an upgrade followed by a downgrade - self.assertCountEqual(expected_cols, actual_cols, - '%s table' % table_name) + self.assertCountEqual( + expected_cols, actual_cols, '%s table' % table_name + ) def test_db_sync_check(self): checker = cli.DbSync() diff --git a/keystone/tests/unit/test_token_provider.py b/keystone/tests/unit/test_token_provider.py index f85e126af7..af3984dba2 100644 --- a/keystone/tests/unit/test_token_provider.py +++ b/keystone/tests/unit/test_token_provider.py @@ -44,7 +44,7 @@ class TestTokenProvider(unit.TestCase): ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) 
self.load_backends() @@ -54,19 +54,19 @@ class TestTokenProvider(unit.TestCase): self.assertEqual(s, urllib.parse.quote_plus(s)) def test_unsupported_token_provider(self): - self.config_fixture.config(group='token', - provider='MyProvider') - self.assertRaises(ImportError, - token.provider.Manager) + self.config_fixture.config(group='token', provider='MyProvider') + self.assertRaises(ImportError, token.provider.Manager) def test_provider_token_expiration_validation(self): token = token_model.TokenModel() token.issued_at = "2013-05-21T00:02:43.941473Z" token.expires_at = utils.isotime(CURRENT_DATE) - self.assertRaises(exception.TokenNotFound, - PROVIDERS.token_provider_api._is_valid_token, - token) + self.assertRaises( + exception.TokenNotFound, + PROVIDERS.token_provider_api._is_valid_token, + token, + ) token = token_model.TokenModel() token.issued_at = "2013-05-21T00:02:43.941473Z" @@ -77,4 +77,5 @@ class TestTokenProvider(unit.TestCase): self.assertRaises( exception.TokenNotFound, PROVIDERS.token_provider_api.validate_token, - None) + None, + ) diff --git a/keystone/tests/unit/test_v3.py b/keystone/tests/unit/test_v3.py index 999e0e252c..1f61225e51 100644 --- a/keystone/tests/unit/test_v3.py +++ b/keystone/tests/unit/test_v3.py @@ -38,23 +38,36 @@ DEFAULT_DOMAIN_ID = 'default' TIME_FORMAT = unit.TIME_FORMAT -class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, - common_auth.AuthTestMixin): +class RestfulTestCase( + unit.SQLDriverOverrides, rest.RestfulTestCase, common_auth.AuthTestMixin +): - def generate_token_schema(self, system_scoped=False, domain_scoped=False, - project_scoped=False): + def generate_token_schema( + self, system_scoped=False, domain_scoped=False, project_scoped=False + ): """Return a dictionary of token properties to validate against.""" ROLES_SCHEMA = { 'type': 'array', 'items': { 'type': 'object', 'properties': { - 'id': {'type': 'string', }, - 'name': {'type': 'string', }, - 'description': {'type': 'string', }, - 
'options': {'type': 'object', } + 'id': { + 'type': 'string', + }, + 'name': { + 'type': 'string', + }, + 'description': { + 'type': 'string', + }, + 'options': { + 'type': 'object', + }, }, - 'required': ['id', 'name', ], + 'required': [ + 'id', + 'name', + ], 'additionalProperties': False, }, 'minItems': 1, @@ -93,7 +106,7 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'type': 'object', 'properties': { 'id': {'type': 'string'}, - 'name': {'type': 'string'} + 'name': {'type': 'string'}, }, 'required': ['id', 'name'], 'additonalProperties': False, @@ -101,19 +114,17 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'password_expires_at': { 'type': ['string', 'null'], 'pattern': unit.TIME_FORMAT_REGEX, - } + }, }, 'additionalProperties': False, - } + }, } if system_scoped: properties['catalog'] = {'type': 'array'} properties['system'] = { 'type': 'object', - 'properties': { - 'all': {'type': 'boolean'} - } + 'properties': {'all': {'type': 'boolean'}}, } properties['roles'] = ROLES_SCHEMA elif domain_scoped: @@ -124,9 +135,9 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'required': ['id', 'name'], 'properties': { 'id': {'type': 'string'}, - 'name': {'type': 'string'} + 'name': {'type': 'string'}, }, - 'additionalProperties': False + 'additionalProperties': False, } elif project_scoped: properties['is_admin_project'] = {'type': 'boolean'} @@ -134,7 +145,10 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, # FIXME(lbragstad): Remove this in favor of the predefined # ROLES_SCHEMA dictionary once bug 1763510 is fixed. 
ROLES_SCHEMA['items']['properties']['domain_id'] = { - 'type': ['null', 'string', ], + 'type': [ + 'null', + 'string', + ], } properties['roles'] = ROLES_SCHEMA properties['is_domain'] = {'type': 'boolean'} @@ -149,21 +163,26 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'required': ['id', 'name'], 'properties': { 'id': {'type': 'string'}, - 'name': {'type': 'string'} + 'name': {'type': 'string'}, }, - 'additionalProperties': False - } + 'additionalProperties': False, + }, }, - 'additionalProperties': False + 'additionalProperties': False, } schema = { 'type': 'object', 'properties': properties, - 'required': ['audit_ids', 'expires_at', 'issued_at', 'methods', - 'user'], + 'required': [ + 'audit_ids', + 'expires_at', + 'issued_at', + 'methods', + 'user', + ], 'optional': [], - 'additionalProperties': False + 'additionalProperties': False, } if system_scoped: @@ -207,14 +226,16 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, except exception.DomainNotFound: root_domain = unit.new_domain_ref( id=resource_base.NULL_DOMAIN_ID, - name=resource_base.NULL_DOMAIN_ID + name=resource_base.NULL_DOMAIN_ID, + ) + PROVIDERS.resource_api.create_domain( + resource_base.NULL_DOMAIN_ID, root_domain ) - PROVIDERS.resource_api.create_domain(resource_base.NULL_DOMAIN_ID, - root_domain) domain = unit.new_domain_ref( description=(u'The default domain'), id=DEFAULT_DOMAIN_ID, - name=u'Default') + name=u'Default', + ) PROVIDERS.resource_api.create_domain(DEFAULT_DOMAIN_ID, domain) def load_sample_data(self, create_region_and_endpoints=True): @@ -229,21 +250,23 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.project_id, self.project ) - self.user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain_id) + self.user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain_id + ) self.user_id = self.user['id'] self.default_domain_project_id = uuid.uuid4().hex self.default_domain_project = 
unit.new_project_ref( - domain_id=DEFAULT_DOMAIN_ID) + domain_id=DEFAULT_DOMAIN_ID + ) self.default_domain_project['id'] = self.default_domain_project_id PROVIDERS.resource_api.create_project( self.default_domain_project_id, self.default_domain_project ) self.default_domain_user = unit.create_user( - PROVIDERS.identity_api, - domain_id=DEFAULT_DOMAIN_ID) + PROVIDERS.identity_api, domain_id=DEFAULT_DOMAIN_ID + ) self.default_domain_user_id = self.default_domain_user['id'] # create & grant policy.yaml's default role for admin_required @@ -251,22 +274,27 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.role_id = self.role['id'] PROVIDERS.role_api.create_role(self.role_id, self.role) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_id, self.project_id, self.role_id) + self.user_id, self.project_id, self.role_id + ) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.default_domain_user_id, self.default_domain_project_id, - self.role_id) + self.default_domain_user_id, + self.default_domain_project_id, + self.role_id, + ) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.default_domain_user_id, self.project_id, - self.role_id) + self.default_domain_user_id, self.project_id, self.role_id + ) # Create "req_admin" user for simulating a real user instead of the # admin_token_auth middleware - self.user_reqadmin = unit.create_user(PROVIDERS.identity_api, - DEFAULT_DOMAIN_ID) + self.user_reqadmin = unit.create_user( + PROVIDERS.identity_api, DEFAULT_DOMAIN_ID + ) PROVIDERS.assignment_api.add_role_to_user_and_project( self.user_reqadmin['id'], self.default_domain_project_id, - self.role_id) + self.role_id, + ) if create_region_and_endpoints: self.region = unit.new_region_ref() @@ -279,9 +307,11 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.service_id, self.service.copy() ) - self.endpoint = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - 
region_id=self.region_id) + self.endpoint = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + ) self.endpoint_id = self.endpoint['id'] PROVIDERS.catalog_api.create_endpoint( self.endpoint_id, self.endpoint.copy() @@ -289,16 +319,15 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, # The server adds 'enabled' and defaults to True. self.endpoint['enabled'] = True - def create_new_default_project_for_user(self, user_id, domain_id, - enable_project=True): + def create_new_default_project_for_user( + self, user_id, domain_id, enable_project=True + ): ref = unit.new_project_ref(domain_id=domain_id, enabled=enable_project) r = self.post('/projects', body={'project': ref}) project = self.assertValidProjectResponse(r, ref) # set the user's preferred project body = {'user': {'default_project_id': project['id']}} - r = self.patch('/users/%(user_id)s' % { - 'user_id': user_id}, - body=body) + r = self.patch('/users/%(user_id)s' % {'user_id': user_id}, body=body) self.assertValidUserResponse(r) return project @@ -318,17 +347,18 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'password': self.user_reqadmin['password'], 'domain': { 'id': self.user_reqadmin['domain_id'] - } + }, } - } + }, }, 'scope': { 'project': { 'id': self.default_domain_project_id, } - } + }, } - }) + }, + ) return r.headers.get('X-Subject-Token') def get_unscoped_token(self): @@ -344,14 +374,13 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'user': { 'name': self.user['name'], 'password': self.user['password'], - 'domain': { - 'id': self.user['domain_id'] - } + 'domain': {'id': self.user['domain_id']}, } - } + }, } } - }) + }, + ) return r.headers.get('X-Subject-Token') def get_scoped_token(self): @@ -367,19 +396,18 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'user': { 'name': self.user['name'], 'password': self.user['password'], - 'domain': { - 'id': 
self.user['domain_id'] - } + 'domain': {'id': self.user['domain_id']}, } - } + }, }, 'scope': { 'project': { 'id': self.project['id'], } - } + }, } - }) + }, + ) return r.headers.get('X-Subject-Token') def get_system_scoped_token(self): @@ -395,17 +423,14 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'user': { 'name': self.user['name'], 'password': self.user['password'], - 'domain': { - 'id': self.user['domain_id'] - } + 'domain': {'id': self.user['domain_id']}, } - } + }, }, - 'scope': { - 'system': {'all': True} - } + 'scope': {'system': {'all': True}}, } - }) + }, + ) return r.headers.get('X-Subject-Token') def get_domain_scoped_token(self): @@ -421,19 +446,18 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'user': { 'name': self.user['name'], 'password': self.user['password'], - 'domain': { - 'id': self.user['domain_id'] - } + 'domain': {'id': self.user['domain_id']}, } - } + }, }, 'scope': { 'domain': { 'id': self.domain['id'], } - } + }, } - }) + }, + ) return r.headers.get('X-Subject-Token') def get_application_credentials_token(self, app_cred_id, app_cred_secret): @@ -447,11 +471,12 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, 'methods': ['application_credential'], 'application_credential': { 'id': app_cred_id, - 'secret': app_cred_secret - } + 'secret': app_cred_secret, + }, } } - }) + }, + ) return r.headers.get('X-Subject-Token') def get_requested_token(self, auth): @@ -460,10 +485,12 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, return r.headers.get('X-Subject-Token') def v3_create_token(self, auth, expected_status=http.client.CREATED): - return self.admin_request(method='POST', - path='/v3/auth/tokens', - body=auth, - expected_status=expected_status) + return self.admin_request( + method='POST', + path='/v3/auth/tokens', + body=auth, + expected_status=expected_status, + ) def v3_noauth_request(self, path, **kwargs): # request does not require auth 
token header @@ -489,30 +516,36 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, return self.admin_request(path=path, token=token, **kwargs) def get(self, path, expected_status=http.client.OK, **kwargs): - return self.v3_request(path, method='GET', - expected_status=expected_status, **kwargs) + return self.v3_request( + path, method='GET', expected_status=expected_status, **kwargs + ) def head(self, path, expected_status=http.client.NO_CONTENT, **kwargs): - r = self.v3_request(path, method='HEAD', - expected_status=expected_status, **kwargs) + r = self.v3_request( + path, method='HEAD', expected_status=expected_status, **kwargs + ) self.assertEqual(b'', r.body) return r def post(self, path, expected_status=http.client.CREATED, **kwargs): - return self.v3_request(path, method='POST', - expected_status=expected_status, **kwargs) + return self.v3_request( + path, method='POST', expected_status=expected_status, **kwargs + ) def put(self, path, expected_status=http.client.NO_CONTENT, **kwargs): - return self.v3_request(path, method='PUT', - expected_status=expected_status, **kwargs) + return self.v3_request( + path, method='PUT', expected_status=expected_status, **kwargs + ) def patch(self, path, expected_status=http.client.OK, **kwargs): - return self.v3_request(path, method='PATCH', - expected_status=expected_status, **kwargs) + return self.v3_request( + path, method='PATCH', expected_status=expected_status, **kwargs + ) def delete(self, path, expected_status=http.client.NO_CONTENT, **kwargs): - return self.v3_request(path, method='DELETE', - expected_status=expected_status, **kwargs) + return self.v3_request( + path, method='DELETE', expected_status=expected_status, **kwargs + ) def assertValidErrorResponse(self, r): resp = r.result @@ -532,17 +565,26 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertIn('next', links) if links['next'] is not None: - self.assertThat(links['next'], - 
matchers.StartsWith('http://localhost')) + self.assertThat( + links['next'], matchers.StartsWith('http://localhost') + ) self.assertIn('previous', links) if links['previous'] is not None: - self.assertThat(links['previous'], - matchers.StartsWith('http://localhost')) + self.assertThat( + links['previous'], matchers.StartsWith('http://localhost') + ) - def assertValidListResponse(self, resp, key, entity_validator, ref=None, - expected_length=None, keys_to_check=None, - resource_url=None): + def assertValidListResponse( + self, + resp, + key, + entity_validator, + ref=None, + expected_length=None, + keys_to_check=None, + resource_url=None, + ): """Make assertions common to all API list responses. If a reference is provided, it's ID will be searched for in the @@ -559,8 +601,9 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertNotEmpty(entities) # collections should have relational links - self.assertValidListLinks(resp.result.get('links'), - resource_url=resource_url) + self.assertValidListLinks( + resp.result.get('links'), resource_url=resource_url + ) for entity in entities: self.assertIsNotNone(entity) @@ -568,13 +611,15 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, entity_validator(entity) if ref: entity = [x for x in entities if x['id'] == ref['id']][0] - self.assertValidEntity(entity, ref=ref, - keys_to_check=keys_to_check) + self.assertValidEntity( + entity, ref=ref, keys_to_check=keys_to_check + ) entity_validator(entity, ref) return entities - def assertValidResponse(self, resp, key, entity_validator, *args, - **kwargs): + def assertValidResponse( + self, resp, key, entity_validator, *args, **kwargs + ): """Make assertions common to all API responses.""" entity = resp.result.get(key) self.assertIsNotNone(entity) @@ -600,8 +645,9 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertIsNotNone(entity.get('links')) self.assertIsNotNone(entity['links'].get('self')) - 
self.assertThat(entity['links']['self'], - matchers.StartsWith('http://localhost')) + self.assertThat( + entity['links']['self'], matchers.StartsWith('http://localhost') + ) self.assertIn(entity['id'], entity['links']['self']) if ref: @@ -629,10 +675,12 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertIsNotNone(token.get('expires_at')) expires_at = self.assertValidISO8601ExtendedFormatDatetime( - token['expires_at']) + token['expires_at'] + ) self.assertIsNotNone(token.get('issued_at')) issued_at = self.assertValidISO8601ExtendedFormatDatetime( - token['issued_at']) + token['issued_at'] + ) self.assertLess(issued_at, expires_at) self.assertIn('user', token) @@ -698,41 +746,43 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, token = self.assertValidScopedTokenResponse(r, *args, **kwargs) project_scoped_token_schema = self.generate_token_schema( - project_scoped=True) + project_scoped=True + ) if token.get('OS-TRUST:trust'): trust_properties = { 'OS-TRUST:trust': { 'type': ['object'], - 'required': ['id', 'impersonation', 'trustor_user', - 'trustee_user'], + 'required': [ + 'id', + 'impersonation', + 'trustor_user', + 'trustee_user', + ], 'properties': { 'id': {'type': 'string'}, 'impersonation': {'type': 'boolean'}, 'trustor_user': { 'type': 'object', 'required': ['id'], - 'properties': { - 'id': {'type': 'string'} - }, - 'additionalProperties': False + 'properties': {'id': {'type': 'string'}}, + 'additionalProperties': False, }, 'trustee_user': { 'type': 'object', 'required': ['id'], - 'properties': { - 'id': {'type': 'string'} - }, - 'additionalProperties': False - } + 'properties': {'id': {'type': 'string'}}, + 'additionalProperties': False, + }, }, - 'additionalProperties': False + 'additionalProperties': False, } } project_scoped_token_schema['properties'].update(trust_properties) validator_object = validators.SchemaValidator( - project_scoped_token_schema) + project_scoped_token_schema + ) 
validator_object.validate(token) self.assertEqual(self.role_id, token['roles'][0]['id']) @@ -772,8 +822,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertIsInstance(resp.json['links'], dict) self.assertEqual(['self'], list(resp.json['links'].keys())) self.assertEqual( - 'http://localhost/v3/auth/catalog', - resp.json['links']['self']) + 'http://localhost/v3/auth/catalog', resp.json['links']['self'] + ) def assertValidCatalog(self, entity): self.assertIsInstance(entity, list) @@ -809,7 +859,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidRegion, keys_to_check=[], *args, - **kwargs) + **kwargs + ) def assertValidRegionResponse(self, resp, *args, **kwargs): return self.assertValidResponse( @@ -818,7 +869,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidRegion, keys_to_check=[], *args, - **kwargs) + **kwargs + ) def assertValidRegion(self, entity, ref=None): self.assertIsNotNone(entity.get('description')) @@ -830,19 +882,13 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, def assertValidServiceListResponse(self, resp, *args, **kwargs): return self.assertValidListResponse( - resp, - 'services', - self.assertValidService, - *args, - **kwargs) + resp, 'services', self.assertValidService, *args, **kwargs + ) def assertValidServiceResponse(self, resp, *args, **kwargs): return self.assertValidResponse( - resp, - 'service', - self.assertValidService, - *args, - **kwargs) + resp, 'service', self.assertValidService, *args, **kwargs + ) def assertValidService(self, entity, ref=None): self.assertIsNotNone(entity.get('type')) @@ -855,19 +901,13 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, def assertValidEndpointListResponse(self, resp, *args, **kwargs): return self.assertValidListResponse( - resp, - 'endpoints', - self.assertValidEndpoint, - *args, - **kwargs) + resp, 'endpoints', self.assertValidEndpoint, *args, 
**kwargs + ) def assertValidEndpointResponse(self, resp, *args, **kwargs): return self.assertValidResponse( - resp, - 'endpoint', - self.assertValidEndpoint, - *args, - **kwargs) + resp, 'endpoint', self.assertValidEndpoint, *args, **kwargs + ) def assertValidEndpoint(self, entity, ref=None): self.assertIsNotNone(entity.get('interface')) @@ -889,19 +929,13 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, def assertValidDomainListResponse(self, resp, *args, **kwargs): return self.assertValidListResponse( - resp, - 'domains', - self.assertValidDomain, - *args, - **kwargs) + resp, 'domains', self.assertValidDomain, *args, **kwargs + ) def assertValidDomainResponse(self, resp, *args, **kwargs): return self.assertValidResponse( - resp, - 'domain', - self.assertValidDomain, - *args, - **kwargs) + resp, 'domain', self.assertValidDomain, *args, **kwargs + ) def assertValidDomain(self, entity, ref=None): if ref: @@ -912,19 +946,13 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, def assertValidProjectListResponse(self, resp, *args, **kwargs): return self.assertValidListResponse( - resp, - 'projects', - self.assertValidProject, - *args, - **kwargs) + resp, 'projects', self.assertValidProject, *args, **kwargs + ) def assertValidProjectResponse(self, resp, *args, **kwargs): return self.assertValidResponse( - resp, - 'project', - self.assertValidProject, - *args, - **kwargs) + resp, 'project', self.assertValidProject, *args, **kwargs + ) def assertValidProject(self, entity, ref=None): if ref: @@ -940,7 +968,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidUser, keys_to_check=['name', 'enabled'], *args, - **kwargs) + **kwargs + ) def assertValidUserResponse(self, resp, *args, **kwargs): return self.assertValidResponse( @@ -949,7 +978,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidUser, keys_to_check=['name', 'enabled'], *args, - **kwargs) + **kwargs + 
) def assertValidUser(self, entity, ref=None): self.assertIsNotNone(entity.get('domain_id')) @@ -962,8 +992,9 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertEqual(ref['email'], entity['email']) if 'default_project_id' in ref: self.assertIsNotNone(ref['default_project_id']) - self.assertEqual(ref['default_project_id'], - entity['default_project_id']) + self.assertEqual( + ref['default_project_id'], entity['default_project_id'] + ) return entity # group validation @@ -975,7 +1006,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidGroup, keys_to_check=['name', 'description', 'domain_id'], *args, - **kwargs) + **kwargs + ) def assertValidGroupResponse(self, resp, *args, **kwargs): return self.assertValidResponse( @@ -984,7 +1016,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidGroup, keys_to_check=['name', 'description', 'domain_id'], *args, - **kwargs) + **kwargs + ) def assertValidGroup(self, entity, ref=None): self.assertIsNotNone(entity.get('name')) @@ -1001,7 +1034,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidCredential, keys_to_check=['blob', 'user_id', 'type'], *args, - **kwargs) + **kwargs + ) def assertValidCredentialResponse(self, resp, *args, **kwargs): return self.assertValidResponse( @@ -1010,7 +1044,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidCredential, keys_to_check=['blob', 'user_id', 'type'], *args, - **kwargs) + **kwargs + ) def assertValidCredential(self, entity, ref=None): self.assertIsNotNone(entity.get('user_id')) @@ -1034,7 +1069,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertValidRole, keys_to_check=['name'], *args, - **kwargs) + **kwargs + ) def assertRoleInListResponse(self, resp, ref, expected=1): found_count = 0 @@ -1058,7 +1094,8 @@ class RestfulTestCase(unit.SQLDriverOverrides, 
rest.RestfulTestCase, self.assertValidRole, keys_to_check=['name'], *args, - **kwargs) + **kwargs + ) def assertValidRole(self, entity, ref=None): self.assertIsNotNone(entity.get('name')) @@ -1069,16 +1106,18 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, # role assignment validation - def assertValidRoleAssignmentListResponse(self, resp, expected_length=None, - resource_url=None): + def assertValidRoleAssignmentListResponse( + self, resp, expected_length=None, resource_url=None + ): entities = resp.result.get('role_assignments') if expected_length or expected_length == 0: self.assertEqual(expected_length, len(entities)) # Collections should have relational links - self.assertValidListLinks(resp.result.get('links'), - resource_url=resource_url) + self.assertValidListLinks( + resp.result.get('links'), resource_url=resource_url + ) for entity in entities: self.assertIsNotNone(entity) @@ -1119,8 +1158,9 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, links = ref.pop('links') try: self.assertLessEqual(ref.items(), entity.items()) - self.assertIn(links['assignment'], - entity['links']['assignment']) + self.assertIn( + links['assignment'], entity['links']['assignment'] + ) finally: if links: ref['links'] = links @@ -1145,19 +1185,13 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, def assertValidPolicyListResponse(self, resp, *args, **kwargs): return self.assertValidListResponse( - resp, - 'policies', - self.assertValidPolicy, - *args, - **kwargs) + resp, 'policies', self.assertValidPolicy, *args, **kwargs + ) def assertValidPolicyResponse(self, resp, *args, **kwargs): return self.assertValidResponse( - resp, - 'policy', - self.assertValidPolicy, - *args, - **kwargs) + resp, 'policy', self.assertValidPolicy, *args, **kwargs + ) def assertValidPolicy(self, entity, ref=None): self.assertIsNotNone(entity.get('blob')) @@ -1174,22 +1208,28 @@ class RestfulTestCase(unit.SQLDriverOverrides, 
rest.RestfulTestCase, resp, 'trusts', self.assertValidTrustSummary, - keys_to_check=['trustor_user_id', - 'trustee_user_id', - 'impersonation'], + keys_to_check=[ + 'trustor_user_id', + 'trustee_user_id', + 'impersonation', + ], *args, - **kwargs) + **kwargs + ) def assertValidTrustResponse(self, resp, *args, **kwargs): return self.assertValidResponse( resp, 'trust', self.assertValidTrust, - keys_to_check=['trustor_user_id', - 'trustee_user_id', - 'impersonation'], + keys_to_check=[ + 'trustor_user_id', + 'trustee_user_id', + 'impersonation', + ], *args, - **kwargs) + **kwargs + ) def assertValidTrustSummary(self, entity, ref=None): return self.assertValidTrust(entity, ref, summary=True) @@ -1229,13 +1269,16 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, self.assertEqual(ref['project_id'], entity['project_id']) if entity.get('expires_at') or ref.get('expires_at'): entity_exp = self.assertValidISO8601ExtendedFormatDatetime( - entity['expires_at']) + entity['expires_at'] + ) ref_exp = self.assertValidISO8601ExtendedFormatDatetime( - ref['expires_at']) + ref['expires_at'] + ) self.assertCloseEnoughForGovernmentWork(entity_exp, ref_exp) else: - self.assertEqual(ref.get('expires_at'), - entity.get('expires_at')) + self.assertEqual( + ref.get('expires_at'), entity.get('expires_at') + ) return entity @@ -1243,8 +1286,17 @@ class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, def assertValidServiceProvider(self, entity, ref=None, *args, **kwargs): - attributes = frozenset(['auth_url', 'id', 'enabled', 'description', - 'links', 'relay_state_prefix', 'sp_url']) + attributes = frozenset( + [ + 'auth_url', + 'id', + 'enabled', + 'description', + 'links', + 'relay_state_prefix', + 'sp_url', + ] + ) for attribute in attributes: self.assertIsNotNone(entity.get(attribute)) @@ -1301,13 +1353,17 @@ class AuthContextMiddlewareTestCase(RestfulTestCase): def application(environ, start_response): body = b'body' - headers = [('Content-Type', 
'text/html; charset=utf8'), - ('Content-Length', str(len(body)))] + headers = [ + ('Content-Type', 'text/html; charset=utf8'), + ('Content-Length', str(len(body))), + ] start_response('200 OK', headers) return [body] - app = webtest.TestApp(auth_context.AuthContextMiddleware(application), - extra_environ=extra_environ) + app = webtest.TestApp( + auth_context.AuthContextMiddleware(application), + extra_environ=extra_environ, + ) resp = app.get('/', headers={authorization.AUTH_TOKEN_HEADER: token}) self.assertEqual(b'body', resp.body) # just to make sure it worked return resp.request @@ -1319,7 +1375,8 @@ class AuthContextMiddlewareTestCase(RestfulTestCase): req = self._middleware_request(admin_token) self.assertEqual( self.user['id'], - req.environ.get(authorization.AUTH_CONTEXT_ENV)['user_id']) + req.environ.get(authorization.AUTH_CONTEXT_ENV)['user_id'], + ) def test_auth_context_override(self): overridden_context = 'OVERRIDDEN_CONTEXT' @@ -1329,8 +1386,9 @@ class AuthContextMiddlewareTestCase(RestfulTestCase): extra_environ = {authorization.AUTH_CONTEXT_ENV: overridden_context} req = self._middleware_request(token, extra_environ=extra_environ) # make sure overridden context take precedence - self.assertEqual(overridden_context, - req.environ.get(authorization.AUTH_CONTEXT_ENV)) + self.assertEqual( + overridden_context, req.environ.get(authorization.AUTH_CONTEXT_ENV) + ) def test_unscoped_token_auth_context(self): unscoped_token = self.get_unscoped_token() @@ -1342,29 +1400,36 @@ class AuthContextMiddlewareTestCase(RestfulTestCase): # unscoped token for key in ['project_id', 'domain_id', 'domain_name']: self.assertIsNone( - req.environ.get(authorization.AUTH_CONTEXT_ENV)[key]) + req.environ.get(authorization.AUTH_CONTEXT_ENV)[key] + ) def test_project_scoped_token_auth_context(self): project_scoped_token = self.get_scoped_token() req = self._middleware_request(project_scoped_token) self.assertEqual( self.project['id'], - 
req.environ.get(authorization.AUTH_CONTEXT_ENV)['project_id']) + req.environ.get(authorization.AUTH_CONTEXT_ENV)['project_id'], + ) def test_domain_scoped_token_auth_context(self): # grant the domain role to user path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) + self.domain['id'], + self.user['id'], + self.role['id'], + ) self.put(path=path) domain_scoped_token = self.get_domain_scoped_token() req = self._middleware_request(domain_scoped_token) self.assertEqual( self.domain['id'], - req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_id']) + req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_id'], + ) self.assertEqual( self.domain['name'], - req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_name']) + req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_name'], + ) def test_oslo_context(self): # After AuthContextMiddleware runs, an @@ -1387,8 +1452,9 @@ class AuthContextMiddlewareTestCase(RestfulTestCase): self.assertEqual(self.project['id'], req_context.project_id) self.assertIsNone(req_context.domain_id) self.assertEqual(self.user['domain_id'], req_context.user_domain_id) - self.assertEqual(self.project['domain_id'], - req_context.project_domain_id) + self.assertEqual( + self.project['domain_id'], req_context.project_domain_id + ) self.assertFalse(req_context.is_admin) def test_auth_context_app_cred_with_rule(self): @@ -1399,25 +1465,32 @@ class AuthContextMiddlewareTestCase(RestfulTestCase): # def application(environ, start_response): body = b'body' - headers = [('Content-Type', 'text/html; charset=utf8'), - ('Content-Length', str(len(body)))] + headers = [ + ('Content-Type', 'text/html; charset=utf8'), + ('Content-Length', str(len(body))), + ] start_response('200 OK', headers) return [body] - token = self.get_application_credentials_token(self.app_cred_r_id, - self.app_cred_r_secret) + token = self.get_application_credentials_token( + self.app_cred_r_id, self.app_cred_r_secret + ) # Test to 
failure app = webtest.TestApp(auth_context.AuthContextMiddleware(application)) - resp = app.get('/v3/projects/e3a0883d15ff409e98e59d460f583a68', - headers={authorization.AUTH_TOKEN_HEADER: token}, - status=401) + resp = app.get( + '/v3/projects/e3a0883d15ff409e98e59d460f583a68', + headers={authorization.AUTH_TOKEN_HEADER: token}, + status=401, + ) self.assertEqual('401 Unauthorized', resp.status) # Test to success app = webtest.TestApp(auth_context.AuthContextMiddleware(application)) - resp = app.get('/v3/users/3879328537914be2b394ddf57a4fc73a', - headers={authorization.AUTH_TOKEN_HEADER: token}) + resp = app.get( + '/v3/users/3879328537914be2b394ddf57a4fc73a', + headers={authorization.AUTH_TOKEN_HEADER: token}, + ) self.assertEqual('200 OK', resp.status) self.assertEqual(b'body', resp.body) # just to make sure it worked @@ -1435,16 +1508,21 @@ class JsonHomeTestMixin(object): """ def test_get_json_home(self): - resp = self.get('/', convert=False, - headers={'Accept': 'application/json-home'}) - self.assertThat(resp.headers['Content-Type'], - matchers.Equals('application/json-home')) + resp = self.get( + '/', convert=False, headers={'Accept': 'application/json-home'} + ) + self.assertThat( + resp.headers['Content-Type'], + matchers.Equals('application/json-home'), + ) resp_data = jsonutils.loads(resp.body) # Check that the example relationships are present. for rel in self.JSON_HOME_DATA: - self.assertThat(resp_data['resources'][rel], - matchers.Equals(self.JSON_HOME_DATA[rel])) + self.assertThat( + resp_data['resources'][rel], + matchers.Equals(self.JSON_HOME_DATA[rel]), + ) class AssignmentTestMixin(object): @@ -1468,7 +1546,8 @@ class AssignmentTestMixin(object): query_params += 'scope.' elif k not in ['user_id', 'group_id', 'role_id']: raise ValueError( - 'Invalid key \'%s\' in provided filters.' % k) + 'Invalid key \'%s\' in provided filters.' 
% k + ) query_params += '%s=%s' % (k.replace('_', '.'), v) @@ -1500,14 +1579,20 @@ class AssignmentTestMixin(object): return link def build_role_assignment_entity( - self, link=None, prior_role_link=None, **attribs): + self, link=None, prior_role_link=None, **attribs + ): """Build and return a role assignment entity with provided attributes. Provided attributes are expected to contain: domain_id or project_id, user_id or group_id, role_id and, optionally, inherited_to_projects. """ - entity = {'links': {'assignment': ( - link or self.build_role_assignment_link(**attribs))}} + entity = { + 'links': { + 'assignment': ( + link or self.build_role_assignment_link(**attribs) + ) + } + } if attribs.get('domain_id'): entity['scope'] = {'domain': {'id': attribs['domain_id']}} @@ -1520,9 +1605,10 @@ class AssignmentTestMixin(object): entity['user'] = {'id': attribs['user_id']} if attribs.get('group_id'): - entity['links']['membership'] = ('/groups/%s/users/%s' % - (attribs['group_id'], - attribs['user_id'])) + entity['links']['membership'] = '/groups/%s/users/%s' % ( + attribs['group_id'], + attribs['user_id'], + ) else: entity['group'] = {'id': attribs['group_id']} @@ -1536,13 +1622,15 @@ class AssignmentTestMixin(object): return entity - def build_role_assignment_entity_include_names(self, - domain_ref=None, - role_ref=None, - group_ref=None, - user_ref=None, - project_ref=None, - inherited_assignment=None): + def build_role_assignment_entity_include_names( + self, + domain_ref=None, + role_ref=None, + group_ref=None, + user_ref=None, + project_ref=None, + inherited_assignment=None, + ): """Build and return a role assignment entity with provided attributes. 
The expected attributes are: domain_ref or project_ref, @@ -1552,45 +1640,56 @@ class AssignmentTestMixin(object): attributes_for_links = {} if project_ref: dmn_name = PROVIDERS.resource_api.get_domain( - project_ref['domain_id'])['name'] + project_ref['domain_id'] + )['name'] - entity['scope'] = {'project': { - 'id': project_ref['id'], - 'name': project_ref['name'], - 'domain': { - 'id': project_ref['domain_id'], - 'name': dmn_name}}} + entity['scope'] = { + 'project': { + 'id': project_ref['id'], + 'name': project_ref['name'], + 'domain': { + 'id': project_ref['domain_id'], + 'name': dmn_name, + }, + } + } attributes_for_links['project_id'] = project_ref['id'] else: - entity['scope'] = {'domain': {'id': domain_ref['id'], - 'name': domain_ref['name']}} + entity['scope'] = { + 'domain': {'id': domain_ref['id'], 'name': domain_ref['name']} + } attributes_for_links['domain_id'] = domain_ref['id'] if user_ref: dmn_name = PROVIDERS.resource_api.get_domain( - user_ref['domain_id'])['name'] - entity['user'] = {'id': user_ref['id'], - 'name': user_ref['name'], - 'domain': {'id': user_ref['domain_id'], - 'name': dmn_name}} + user_ref['domain_id'] + )['name'] + entity['user'] = { + 'id': user_ref['id'], + 'name': user_ref['name'], + 'domain': {'id': user_ref['domain_id'], 'name': dmn_name}, + } attributes_for_links['user_id'] = user_ref['id'] else: dmn_name = PROVIDERS.resource_api.get_domain( - group_ref['domain_id'])['name'] - entity['group'] = {'id': group_ref['id'], - 'name': group_ref['name'], - 'domain': { - 'id': group_ref['domain_id'], - 'name': dmn_name}} + group_ref['domain_id'] + )['name'] + entity['group'] = { + 'id': group_ref['id'], + 'name': group_ref['name'], + 'domain': {'id': group_ref['domain_id'], 'name': dmn_name}, + } attributes_for_links['group_id'] = group_ref['id'] if role_ref: - entity['role'] = {'id': role_ref['id'], - 'name': role_ref['name']} + entity['role'] = {'id': role_ref['id'], 'name': role_ref['name']} if role_ref['domain_id']: dmn_name = 
PROVIDERS.resource_api.get_domain( - role_ref['domain_id'])['name'] - entity['role']['domain'] = {'id': role_ref['domain_id'], - 'name': dmn_name} + role_ref['domain_id'] + )['name'] + entity['role']['domain'] = { + 'id': role_ref['domain_id'], + 'name': dmn_name, + } attributes_for_links['role_id'] = role_ref['id'] if inherited_assignment: @@ -1598,6 +1697,7 @@ class AssignmentTestMixin(object): attributes_for_links['inherited_to_projects'] = True entity['links']['assignment'] = self.build_role_assignment_link( - **attributes_for_links) + **attributes_for_links + ) return entity diff --git a/keystone/tests/unit/test_v3_application_credential.py b/keystone/tests/unit/test_v3_application_credential.py index e581d0dbfa..750475022c 100644 --- a/keystone/tests/unit/test_v3_application_credential.py +++ b/keystone/tests/unit/test_v3_application_credential.py @@ -32,17 +32,21 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): def config_overrides(self): super(ApplicationCredentialTestCase, self).config_overrides() - self.config_fixture.config(group='auth', - methods='password,application_credential') + self.config_fixture.config( + group='auth', methods='password,application_credential' + ) - def _app_cred_body(self, roles=None, name=None, expires=None, secret=None, - access_rules=None): + def _app_cred_body( + self, + roles=None, + name=None, + expires=None, + secret=None, + access_rules=None, + ): name = name or uuid.uuid4().hex description = 'Credential for backups' - app_cred_data = { - 'name': name, - 'description': description - } + app_cred_data = {'name': name, 'description': description} if roles: app_cred_data['roles'] = roles if expires: @@ -62,7 +66,8 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) # Create operation returns the secret 
self.assertIn('secret', resp.json['application_credential']) # But not the stored hash @@ -108,7 +113,8 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) self.assertEqual(secret, resp.json['application_credential']['secret']) def test_create_application_credential_roles_from_token(self): @@ -119,34 +125,43 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) - self.assertThat(resp.json['application_credential']['roles'], - matchers.HasLength(1)) + headers={'X-Auth-Token': token}, + ) + self.assertThat( + resp.json['application_credential']['roles'], + matchers.HasLength(1), + ) self.assertEqual( resp.json['application_credential']['roles'][0]['id'], - self.role_id) + self.role_id, + ) def test_create_application_credential_wrong_user(self): - wrong_user = unit.create_user(PROVIDERS.identity_api, - test_v3.DEFAULT_DOMAIN_ID) + wrong_user = unit.create_user( + PROVIDERS.identity_api, test_v3.DEFAULT_DOMAIN_ID + ) with self.test_client() as c: roles = [{'id': self.role_id}] app_cred_body = self._app_cred_body(roles=roles) token = self.get_scoped_token() - c.post('/v3/users/%s/application_credentials' % wrong_user['id'], - json=app_cred_body, - expected_status_code=http.client.FORBIDDEN, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % wrong_user['id'], + json=app_cred_body, + expected_status_code=http.client.FORBIDDEN, + headers={'X-Auth-Token': token}, + ) def test_create_application_credential_bad_role(self): with self.test_client() as c: roles = [{'id': uuid.uuid4().hex}] app_cred_body = self._app_cred_body(roles=roles) token = self.get_scoped_token() - 
c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body, - expected_status_code=http.client.BAD_REQUEST, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body, + expected_status_code=http.client.BAD_REQUEST, + headers={'X-Auth-Token': token}, + ) def test_create_application_credential_with_expiration(self): with self.test_client() as c: @@ -155,10 +170,12 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): expires = str(expires) app_cred_body = self._app_cred_body(roles=roles, expires=expires) token = self.get_scoped_token() - c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body, - expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body, + expected_status_code=http.client.CREATED, + headers={'X-Auth-Token': token}, + ) def test_create_application_credential_invalid_expiration_fmt(self): with self.test_client() as c: @@ -166,10 +183,12 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): expires = 'next tuesday' app_cred_body = self._app_cred_body(roles=roles, expires=expires) token = self.get_scoped_token() - c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body, - expected_status_code=http.client.BAD_REQUEST, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body, + expected_status_code=http.client.BAD_REQUEST, + headers={'X-Auth-Token': token}, + ) def test_create_application_credential_already_expired(self): with self.test_client() as c: @@ -177,10 +196,12 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): expires = datetime.datetime.utcnow() - datetime.timedelta(hours=1) app_cred_body = self._app_cred_body(roles=roles, expires=expires) token = self.get_scoped_token() - 
c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body, - expected_status_code=http.client.BAD_REQUEST, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body, + expected_status_code=http.client.BAD_REQUEST, + headers={'X-Auth-Token': token}, + ) def test_create_application_credential_with_application_credential(self): with self.test_client() as c: @@ -191,24 +212,30 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body_1, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) auth_data = self.build_authentication_request( app_cred_id=app_cred_1.json['application_credential']['id'], - secret=app_cred_1.json['application_credential']['secret']) + secret=app_cred_1.json['application_credential']['secret'], + ) token_data = self.v3_create_token( - auth_data, expected_status=http.client.CREATED) + auth_data, expected_status=http.client.CREATED + ) app_cred_body_2 = self._app_cred_body(roles=roles) token = token_data.headers['x-subject-token'] - c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body_2, - expected_status_code=http.client.FORBIDDEN, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body_2, + expected_status_code=http.client.FORBIDDEN, + headers={'X-Auth-Token': token}, + ) def test_create_application_credential_with_trust(self): second_role = unit.new_role_ref(name='reader') PROVIDERS.role_api.create_role(second_role['id'], second_role) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_id, self.project_id, second_role['id']) + self.user_id, self.project_id, second_role['id'] + ) with self.test_client() as c: pw_token = self.get_scoped_token() # create a self-trust - only the roles are important for this 
test @@ -216,24 +243,31 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): trustor_user_id=self.user_id, trustee_user_id=self.user_id, project_id=self.project_id, - role_ids=[second_role['id']]) - resp = c.post('/v3/OS-TRUST/trusts', - headers={'X-Auth-Token': pw_token}, - json={'trust': trust_ref}) + role_ids=[second_role['id']], + ) + resp = c.post( + '/v3/OS-TRUST/trusts', + headers={'X-Auth-Token': pw_token}, + json={'trust': trust_ref}, + ) trust_id = resp.json['trust']['id'] trust_auth = self.build_authentication_request( user_id=self.user_id, password=self.user['password'], - trust_id=trust_id) - trust_token = self.v3_create_token( - trust_auth).headers['X-Subject-Token'] + trust_id=trust_id, + ) + trust_token = self.v3_create_token(trust_auth).headers[ + 'X-Subject-Token' + ] app_cred = self._app_cred_body(roles=[{'id': self.role_id}]) # only the roles from the trust token should be allowed, even if # the user has the role assigned on the project - c.post('/v3/users/%s/application_credentials' % self.user_id, - headers={'X-Auth-Token': trust_token}, - json=app_cred, - expected_status_code=http.client.BAD_REQUEST) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + headers={'X-Auth-Token': trust_token}, + json=app_cred, + expected_status_code=http.client.BAD_REQUEST, + ) def test_create_application_credential_allow_recursion(self): with self.test_client() as c: @@ -245,18 +279,24 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body_1, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) auth_data = self.build_authentication_request( app_cred_id=app_cred_1.json['application_credential']['id'], - secret=app_cred_1.json['application_credential']['secret']) + secret=app_cred_1.json['application_credential']['secret'], + ) token_data = self.v3_create_token( - auth_data, 
expected_status=http.client.CREATED) + auth_data, expected_status=http.client.CREATED + ) app_cred_body_2 = self._app_cred_body(roles=roles) - c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body_2, - expected_status_code=http.client.CREATED, - headers={ - 'x-Auth-Token': token_data.headers['x-subject-token']}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body_2, + expected_status_code=http.client.CREATED, + headers={ + 'x-Auth-Token': token_data.headers['x-subject-token'] + }, + ) def test_create_application_credential_with_access_rules(self): roles = [{'id': self.role_id}] @@ -267,43 +307,57 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): 'service': 'identity', } ] - app_cred_body = self._app_cred_body(roles=roles, - access_rules=access_rules) + app_cred_body = self._app_cred_body( + roles=roles, access_rules=access_rules + ) with self.test_client() as c: token = self.get_scoped_token() resp = c.post( '/v3/users/%s/application_credentials' % self.user_id, headers={'X-Auth-Token': token}, json=app_cred_body, - expected_status_code=http.client.CREATED) + expected_status_code=http.client.CREATED, + ) app_cred_id = resp.json['application_credential']['id'] - resp_access_rules = ( - resp.json['application_credential']['access_rules']) + resp_access_rules = resp.json['application_credential'][ + 'access_rules' + ] access_rule_id = resp_access_rules[0].pop('id') self.assertEqual(access_rules[0], resp_access_rules[0]) - resp = c.get('/v3/users/%s/access_rules' % self.user_id, - headers={'X-Auth-Token': token}) + resp = c.get( + '/v3/users/%s/access_rules' % self.user_id, + headers={'X-Auth-Token': token}, + ) resp_access_rule = resp.json['access_rules'][0] resp_access_rule.pop('id') resp_access_rule.pop('links') self.assertEqual(access_rules[0], resp_access_rule) - resp = c.get('/v3/users/%s/access_rules/%s' % ( - self.user_id, access_rule_id), headers={'X-Auth-Token': token}) + resp = 
c.get( + '/v3/users/%s/access_rules/%s' + % (self.user_id, access_rule_id), + headers={'X-Auth-Token': token}, + ) resp_access_rule = resp.json['access_rule'] resp_access_rule.pop('id') resp_access_rule.pop('links') self.assertEqual(access_rules[0], resp_access_rule) # can't delete an access rule in use - c.delete('/v3/users/%s/access_rules/%s' % ( - self.user_id, access_rule_id), - headers={'X-Auth-Token': token}, - expected_status_code=http.client.FORBIDDEN) - c.delete('/v3/users/%s/application_credentials/%s' % ( - self.user_id, app_cred_id), - headers={'X-Auth-Token': token}) - c.delete('/v3/users/%s/access_rules/%s' % ( - self.user_id, access_rule_id), - headers={'X-Auth-Token': token}) + c.delete( + '/v3/users/%s/access_rules/%s' + % (self.user_id, access_rule_id), + headers={'X-Auth-Token': token}, + expected_status_code=http.client.FORBIDDEN, + ) + c.delete( + '/v3/users/%s/application_credentials/%s' + % (self.user_id, app_cred_id), + headers={'X-Auth-Token': token}, + ) + c.delete( + '/v3/users/%s/access_rules/%s' + % (self.user_id, access_rule_id), + headers={'X-Auth-Token': token}, + ) def test_create_application_credential_with_duplicate_access_rule(self): roles = [{'id': self.role_id}] @@ -314,29 +368,33 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): 'service': 'identity', } ] - app_cred_body_1 = self._app_cred_body(roles=roles, - access_rules=access_rules) + app_cred_body_1 = self._app_cred_body( + roles=roles, access_rules=access_rules + ) with self.test_client() as c: token = self.get_scoped_token() resp = c.post( '/v3/users/%s/application_credentials' % self.user_id, headers={'X-Auth-Token': token}, json=app_cred_body_1, - expected_status_code=http.client.CREATED) + expected_status_code=http.client.CREATED, + ) resp_access_rules = resp.json['application_credential']['access_rules'] self.assertIn('id', resp_access_rules[0]) access_rule_id = resp_access_rules[0].pop('id') self.assertEqual(access_rules[0], resp_access_rules[0]) - 
app_cred_body_2 = self._app_cred_body(roles=roles, - access_rules=access_rules) + app_cred_body_2 = self._app_cred_body( + roles=roles, access_rules=access_rules + ) with self.test_client() as c: token = self.get_scoped_token() resp = c.post( '/v3/users/%s/application_credentials' % self.user_id, headers={'X-Auth-Token': token}, json=app_cred_body_2, - expected_status_code=http.client.CREATED) + expected_status_code=http.client.CREATED, + ) resp_access_rules = resp.json['application_credential']['access_rules'] self.assertEqual(access_rule_id, resp_access_rules[0]['id']) @@ -349,15 +407,17 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): 'service': 'identity', } ] - app_cred_body_1 = self._app_cred_body(roles=roles, - access_rules=access_rules) + app_cred_body_1 = self._app_cred_body( + roles=roles, access_rules=access_rules + ) with self.test_client() as c: token = self.get_scoped_token() resp = c.post( '/v3/users/%s/application_credentials' % self.user_id, headers={'X-Auth-Token': token}, json=app_cred_body_1, - expected_status_code=http.client.CREATED) + expected_status_code=http.client.CREATED, + ) resp_access_rules = resp.json['application_credential']['access_rules'] access_rule_id = resp_access_rules self.assertIn('id', resp_access_rules[0]) @@ -365,46 +425,59 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): self.assertEqual(access_rules[0], resp_access_rules[0]) access_rules = [{'id': access_rule_id}] - app_cred_body_2 = self._app_cred_body(roles=roles, - access_rules=access_rules) + app_cred_body_2 = self._app_cred_body( + roles=roles, access_rules=access_rules + ) with self.test_client() as c: token = self.get_scoped_token() resp = c.post( '/v3/users/%s/application_credentials' % self.user_id, headers={'X-Auth-Token': token}, json=app_cred_body_2, - expected_status_code=http.client.CREATED) + expected_status_code=http.client.CREATED, + ) resp_access_rules = resp.json['application_credential']['access_rules'] 
self.assertEqual(access_rule_id, resp_access_rules[0]['id']) def test_list_application_credentials(self): with self.test_client() as c: token = self.get_scoped_token() - resp = c.get('/v3/users/%s/application_credentials' % self.user_id, - expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + resp = c.get( + '/v3/users/%s/application_credentials' % self.user_id, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) self.assertEqual([], resp.json['application_credentials']) roles = [{'id': self.role_id}] app_cred_body = self._app_cred_body(roles=roles) - c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body, - expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) - resp = c.get('/v3/users/%s/application_credentials' % self.user_id, - expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body, + expected_status_code=http.client.CREATED, + headers={'X-Auth-Token': token}, + ) + resp = c.get( + '/v3/users/%s/application_credentials' % self.user_id, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) self.assertEqual(1, len(resp.json['application_credentials'])) self.assertNotIn('secret', resp.json['application_credentials'][0]) - self.assertNotIn('secret_hash', - resp.json['application_credentials'][0]) + self.assertNotIn( + 'secret_hash', resp.json['application_credentials'][0] + ) app_cred_body['application_credential']['name'] = 'two' - c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body, - expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) - resp = c.get('/v3/users/%s/application_credentials' % self.user_id, - expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body, + 
expected_status_code=http.client.CREATED, + headers={'X-Auth-Token': token}, + ) + resp = c.get( + '/v3/users/%s/application_credentials' % self.user_id, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) self.assertEqual(2, len(resp.json['application_credentials'])) for ac in resp.json['application_credentials']: self.assertNotIn('secret', ac) @@ -414,26 +487,35 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): second_role = unit.new_role_ref(name='test_new_role') PROVIDERS.role_api.create_role(second_role['id'], second_role) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_id, self.project_id, second_role['id']) + self.user_id, self.project_id, second_role['id'] + ) with self.test_client() as c: token = self.get_scoped_token() - resp = c.get('/v3/users/%s/application_credentials' % self.user_id, - expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + resp = c.get( + '/v3/users/%s/application_credentials' % self.user_id, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) self.assertEqual([], resp.json['application_credentials']) roles = [{'id': second_role['id']}] app_cred_body = self._app_cred_body(roles=roles) - c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body, - expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) - resp = c.get('/v3/users/%s/application_credentials' % self.user_id, - expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body, + expected_status_code=http.client.CREATED, + headers={'X-Auth-Token': token}, + ) + resp = c.get( + '/v3/users/%s/application_credentials' % self.user_id, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) PROVIDERS.role_api.delete_role(second_role['id']) - resp = c.get('/v3/users/%s/application_credentials' % self.user_id, - 
expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + resp = c.get( + '/v3/users/%s/application_credentials' % self.user_id, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) def test_list_application_credentials_by_name(self): with self.test_client() as c: @@ -441,34 +523,48 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): app_cred_body = self._app_cred_body(roles=roles) token = self.get_scoped_token() name = app_cred_body['application_credential']['name'] - search_path = ('/v3/users/%(user_id)s/application_credentials?' - 'name=%(name)s') % {'user_id': self.user_id, - 'name': name} - resp = c.get(search_path, - expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + search_path = ( + '/v3/users/%(user_id)s/application_credentials?' + 'name=%(name)s' + ) % {'user_id': self.user_id, 'name': name} + resp = c.get( + search_path, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) self.assertEqual([], resp.json['application_credentials']) resp = c.post( '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) - resp = c.get(search_path, expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) + resp = c.get( + search_path, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) self.assertEqual(1, len(resp.json['application_credentials'])) self.assertNotIn('secret', resp.json['application_credentials'][0]) - self.assertNotIn('secret_hash', - resp.json['application_credentials'][0]) + self.assertNotIn( + 'secret_hash', resp.json['application_credentials'][0] + ) app_cred_body['application_credential']['name'] = 'two' - c.post('/v3/users/%s/application_credentials' % self.user_id, - json=app_cred_body, - expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) - resp = 
c.get(search_path, expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + c.post( + '/v3/users/%s/application_credentials' % self.user_id, + json=app_cred_body, + expected_status_code=http.client.CREATED, + headers={'X-Auth-Token': token}, + ) + resp = c.get( + search_path, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) self.assertEqual(1, len(resp.json['application_credentials'])) - self.assertEqual(resp.json['application_credentials'][0]['name'], - name) + self.assertEqual( + resp.json['application_credentials'][0]['name'], name + ) def test_get_head_application_credential(self): with self.test_client() as c: @@ -479,31 +575,44 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) app_cred_id = resp.json['application_credential']['id'] - c.head('/v3%s' % MEMBER_PATH_FMT % {'user_id': self.user_id, - 'app_cred_id': app_cred_id}, - expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + c.head( + '/v3%s' + % MEMBER_PATH_FMT + % {'user_id': self.user_id, 'app_cred_id': app_cred_id}, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) expected_response = resp.json expected_response['application_credential'].pop('secret') - resp = c.get('/v3%s' % MEMBER_PATH_FMT % {'user_id': self.user_id, - 'app_cred_id': app_cred_id}, - expected_status_code=http.client.OK, - headers={'X-Auth-Token': token}) + resp = c.get( + '/v3%s' + % MEMBER_PATH_FMT + % {'user_id': self.user_id, 'app_cred_id': app_cred_id}, + expected_status_code=http.client.OK, + headers={'X-Auth-Token': token}, + ) self.assertDictEqual(resp.json, expected_response) def test_get_head_application_credential_not_found(self): with self.test_client() as c: token = self.get_scoped_token() - c.head('/v3%s' % MEMBER_PATH_FMT % 
{'user_id': self.user_id, - 'app_cred_id': uuid.uuid4().hex}, - expected_status_code=http.client.NOT_FOUND, - headers={'X-Auth-Token': token}) - c.get('/v3%s' % MEMBER_PATH_FMT % {'user_id': self.user_id, - 'app_cred_id': uuid.uuid4().hex}, - expected_status_code=http.client.NOT_FOUND, - headers={'X-Auth-Token': token}) + c.head( + '/v3%s' + % MEMBER_PATH_FMT + % {'user_id': self.user_id, 'app_cred_id': uuid.uuid4().hex}, + expected_status_code=http.client.NOT_FOUND, + headers={'X-Auth-Token': token}, + ) + c.get( + '/v3%s' + % MEMBER_PATH_FMT + % {'user_id': self.user_id, 'app_cred_id': uuid.uuid4().hex}, + expected_status_code=http.client.NOT_FOUND, + headers={'X-Auth-Token': token}, + ) def test_delete_application_credential(self): with self.test_client() as c: @@ -514,20 +623,27 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) app_cred_id = resp.json['application_credential']['id'] - c.delete('/v3%s' % MEMBER_PATH_FMT % {'user_id': self.user_id, - 'app_cred_id': app_cred_id}, - expected_status_code=http.client.NO_CONTENT, - headers={'X-Auth-Token': token}) + c.delete( + '/v3%s' + % MEMBER_PATH_FMT + % {'user_id': self.user_id, 'app_cred_id': app_cred_id}, + expected_status_code=http.client.NO_CONTENT, + headers={'X-Auth-Token': token}, + ) def test_delete_application_credential_not_found(self): with self.test_client() as c: token = self.get_scoped_token() - c.delete('/v3%s' % MEMBER_PATH_FMT % {'user_id': self.user_id, - 'app_cred_id': uuid.uuid4().hex}, - expected_status_code=http.client.NOT_FOUND, - headers={'X-Auth-Token': token}) + c.delete( + '/v3%s' + % MEMBER_PATH_FMT + % {'user_id': self.user_id, 'app_cred_id': uuid.uuid4().hex}, + expected_status_code=http.client.NOT_FOUND, + headers={'X-Auth-Token': token}, + ) def 
test_delete_application_credential_with_application_credential(self): with self.test_client() as c: @@ -538,20 +654,32 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) auth_data = self.build_authentication_request( app_cred_id=app_cred.json['application_credential']['id'], - secret=app_cred.json['application_credential']['secret']) + secret=app_cred.json['application_credential']['secret'], + ) token_data = self.v3_create_token( - auth_data, expected_status=http.client.CREATED) - member_path = '/v3%s' % MEMBER_PATH_FMT % { - 'user_id': self.user_id, - 'app_cred_id': app_cred.json['application_credential']['id']} + auth_data, expected_status=http.client.CREATED + ) + member_path = ( + '/v3%s' + % MEMBER_PATH_FMT + % { + 'user_id': self.user_id, + 'app_cred_id': app_cred.json['application_credential'][ + 'id' + ], + } + ) token = token_data.headers['x-subject-token'] - c.delete(member_path, - json=app_cred_body, - expected_status_code=http.client.FORBIDDEN, - headers={'X-Auth-Token': token}) + c.delete( + member_path, + json=app_cred_body, + expected_status_code=http.client.FORBIDDEN, + headers={'X-Auth-Token': token}, + ) def test_delete_application_credential_allow_recursion(self): with self.test_client() as c: @@ -563,21 +691,33 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) auth_data = self.build_authentication_request( app_cred_id=app_cred.json['application_credential']['id'], - secret=app_cred.json['application_credential']['secret']) + secret=app_cred.json['application_credential']['secret'], + ) token_data = self.v3_create_token( - auth_data, 
expected_status=http.client.CREATED) - member_path = '/v3%s' % MEMBER_PATH_FMT % { - 'user_id': self.user_id, - 'app_cred_id': app_cred.json['application_credential']['id']} - c.delete(member_path, - json=app_cred_body, - expected_status_code=http.client.NO_CONTENT, - headers={ - 'x-Auth-Token': token_data.headers['x-subject-token'] - }) + auth_data, expected_status=http.client.CREATED + ) + member_path = ( + '/v3%s' + % MEMBER_PATH_FMT + % { + 'user_id': self.user_id, + 'app_cred_id': app_cred.json['application_credential'][ + 'id' + ], + } + ) + c.delete( + member_path, + json=app_cred_body, + expected_status_code=http.client.NO_CONTENT, + headers={ + 'x-Auth-Token': token_data.headers['x-subject-token'] + }, + ) def test_update_application_credential(self): with self.test_client() as c: @@ -588,18 +728,24 @@ class ApplicationCredentialTestCase(test_v3.RestfulTestCase): '/v3/users/%s/application_credentials' % self.user_id, json=app_cred_body, expected_status_code=http.client.CREATED, - headers={'X-Auth-Token': token}) + headers={'X-Auth-Token': token}, + ) # Application credentials are immutable app_cred_body['application_credential'][ - 'description'] = "New Things" + 'description' + ] = "New Things" app_cred_id = resp.json['application_credential']['id'] # NOTE(morgan): when the whole test case is converted to using # flask test_client, this extra v3 prefix will # need to be rolled into the base MEMBER_PATH_FMT - member_path = '/v3%s' % MEMBER_PATH_FMT % { - 'user_id': self.user_id, - 'app_cred_id': app_cred_id} - c.patch(member_path, - json=app_cred_body, - expected_status_code=http.client.METHOD_NOT_ALLOWED, - headers={'X-Auth-Token': token}) + member_path = ( + '/v3%s' + % MEMBER_PATH_FMT + % {'user_id': self.user_id, 'app_cred_id': app_cred_id} + ) + c.patch( + member_path, + json=app_cred_body, + expected_status_code=http.client.METHOD_NOT_ALLOWED, + headers={'X-Auth-Token': token}, + ) diff --git a/keystone/tests/unit/test_v3_assignment.py 
b/keystone/tests/unit/test_v3_assignment.py index 605db50294..a324deddd4 100644 --- a/keystone/tests/unit/test_v3_assignment.py +++ b/keystone/tests/unit/test_v3_assignment.py @@ -42,28 +42,24 @@ class SystemRoleAssignmentMixin(object): def _create_group(self): body = { - 'group': { - 'domain_id': self.domain_id, - 'name': uuid.uuid4().hex - } + 'group': {'domain_id': self.domain_id, 'name': uuid.uuid4().hex} } response = self.post('/groups/', body=body) return response.json_body['group'] def _create_user(self): body = { - 'user': { - 'domain_id': self.domain_id, - 'name': uuid.uuid4().hex - } + 'user': {'domain_id': self.domain_id, 'name': uuid.uuid4().hex} } response = self.post('/users/', body=body) return response.json_body['user'] -class AssignmentTestCase(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin, - SystemRoleAssignmentMixin): +class AssignmentTestCase( + test_v3.RestfulTestCase, + test_v3.AssignmentTestMixin, + SystemRoleAssignmentMixin, +): """Test roles and role assignments.""" def setUp(self): @@ -78,28 +74,29 @@ class AssignmentTestCase(test_v3.RestfulTestCase, def test_create_role(self): """Call ``POST /roles``.""" ref = unit.new_role_ref() - r = self.post( - '/roles', - body={'role': ref}) + r = self.post('/roles', body={'role': ref}) return self.assertValidRoleResponse(r, ref) def test_create_role_bad_request(self): """Call ``POST /roles``.""" - self.post('/roles', body={'role': {}}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/roles', + body={'role': {}}, + expected_status=http.client.BAD_REQUEST, + ) def test_list_head_roles(self): """Call ``GET & HEAD /roles``.""" resource_url = '/roles' r = self.get(resource_url) - self.assertValidRoleListResponse(r, ref=self.role, - resource_url=resource_url) + self.assertValidRoleListResponse( + r, ref=self.role, resource_url=resource_url + ) self.head(resource_url, expected_status=http.client.OK) def test_get_head_role(self): """Call ``GET & HEAD /roles/{role_id}``.""" - resource_url 
= '/roles/%(role_id)s' % { - 'role_id': self.role_id} + resource_url = '/roles/%(role_id)s' % {'role_id': self.role_id} r = self.get(resource_url) self.assertValidRoleResponse(r, self.role) self.head(resource_url, expected_status=http.client.OK) @@ -108,15 +105,15 @@ class AssignmentTestCase(test_v3.RestfulTestCase, """Call ``PATCH /roles/{role_id}``.""" ref = unit.new_role_ref() del ref['id'] - r = self.patch('/roles/%(role_id)s' % { - 'role_id': self.role_id}, - body={'role': ref}) + r = self.patch( + '/roles/%(role_id)s' % {'role_id': self.role_id}, + body={'role': ref}, + ) self.assertValidRoleResponse(r, ref) def test_delete_role(self): """Call ``DELETE /roles/{role_id}``.""" - self.delete('/roles/%(role_id)s' % { - 'role_id': self.role_id}) + self.delete('/roles/%(role_id)s' % {'role_id': self.role_id}) # Role Grants tests @@ -124,26 +121,26 @@ class AssignmentTestCase(test_v3.RestfulTestCase, role = unit.new_role_ref() PROVIDERS.role_api.create_role(role['id'], role) - collection_url = ( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.project['id'], - 'user_id': self.user['id']}) + collection_url = '/projects/%(project_id)s/users/%(user_id)s/roles' % { + 'project_id': self.project['id'], + 'user_id': self.user['id'], + } member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': role['id']} + 'role_id': role['id'], + } # There is a role assignment for self.user on self.project r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, - expected_length=1) + self.assertValidRoleListResponse(r, ref=self.role, expected_length=1) self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role, - resource_url=collection_url, - expected_length=2) + self.assertValidRoleListResponse( + r, ref=role, resource_url=collection_url, expected_length=2 + ) 
self.head(collection_url, expected_status=http.client.OK) self.delete(member_url) @@ -161,12 +158,14 @@ class AssignmentTestCase(test_v3.RestfulTestCase, """ user_id = uuid.uuid4().hex - collection_url = ( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.project['id'], 'user_id': user_id}) + collection_url = '/projects/%(project_id)s/users/%(user_id)s/roles' % { + 'project_id': self.project['id'], + 'user_id': user_id, + } member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } self.put(member_url, expected_status=http.client.NOT_FOUND) self.head(member_url, expected_status=http.client.NOT_FOUND) @@ -176,19 +175,21 @@ class AssignmentTestCase(test_v3.RestfulTestCase, time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, - 'user_id': self.user['id']}) + '/domains/%(domain_id)s/users/%(user_id)s/roles' + % {'domain_id': self.domain_id, 'user_id': self.user['id']} + ) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, - resource_url=collection_url) + self.assertValidRoleListResponse( + r, ref=self.role, resource_url=collection_url + ) self.head(collection_url, expected_status=http.client.OK) self.delete(member_url) @@ -198,8 +199,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, # event. 
frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) r = self.get(collection_url) - self.assertValidRoleListResponse(r, expected_length=0, - resource_url=collection_url) + self.assertValidRoleListResponse( + r, expected_length=0, resource_url=collection_url + ) self.head(collection_url, expected_status=http.client.OK) def test_crud_user_domain_role_grants_no_user(self): @@ -211,12 +213,14 @@ class AssignmentTestCase(test_v3.RestfulTestCase, """ user_id = uuid.uuid4().hex - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, 'user_id': user_id}) + collection_url = '/domains/%(domain_id)s/users/%(user_id)s/roles' % { + 'domain_id': self.domain_id, + 'user_id': user_id, + } member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } self.put(member_url, expected_status=http.client.NOT_FOUND) self.head(member_url, expected_status=http.client.NOT_FOUND) @@ -226,19 +230,21 @@ class AssignmentTestCase(test_v3.RestfulTestCase, time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: collection_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/roles' % { - 'project_id': self.project_id, - 'group_id': self.group_id}) + '/projects/%(project_id)s/groups/%(group_id)s/roles' + % {'project_id': self.project_id, 'group_id': self.group_id} + ) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, - resource_url=collection_url) + self.assertValidRoleListResponse( + r, ref=self.role, resource_url=collection_url + ) self.head(collection_url, expected_status=http.client.OK) self.delete(member_url) @@ -248,8 +254,9 @@ class 
AssignmentTestCase(test_v3.RestfulTestCase, # event. frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) r = self.get(collection_url) - self.assertValidRoleListResponse(r, expected_length=0, - resource_url=collection_url) + self.assertValidRoleListResponse( + r, expected_length=0, resource_url=collection_url + ) self.head(collection_url, expected_status=http.client.OK) def test_crud_group_project_role_grants_no_group(self): @@ -262,12 +269,13 @@ class AssignmentTestCase(test_v3.RestfulTestCase, group_id = uuid.uuid4().hex collection_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/roles' % { - 'project_id': self.project_id, - 'group_id': group_id}) + '/projects/%(project_id)s/groups/%(group_id)s/roles' + % {'project_id': self.project_id, 'group_id': group_id} + ) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } self.put(member_url, expected_status=http.client.NOT_FOUND) self.head(member_url, expected_status=http.client.NOT_FOUND) @@ -277,19 +285,21 @@ class AssignmentTestCase(test_v3.RestfulTestCase, time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: collection_url = ( - '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': self.domain_id, - 'group_id': self.group_id}) + '/domains/%(domain_id)s/groups/%(group_id)s/roles' + % {'domain_id': self.domain_id, 'group_id': self.group_id} + ) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, - resource_url=collection_url) + self.assertValidRoleListResponse( + r, ref=self.role, resource_url=collection_url + ) self.head(collection_url, expected_status=http.client.OK) self.delete(member_url) @@ 
-299,8 +309,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, # event. frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) r = self.get(collection_url) - self.assertValidRoleListResponse(r, expected_length=0, - resource_url=collection_url) + self.assertValidRoleListResponse( + r, expected_length=0, resource_url=collection_url + ) self.head(collection_url, expected_status=http.client.OK) def test_crud_group_domain_role_grants_no_group(self): @@ -312,13 +323,14 @@ class AssignmentTestCase(test_v3.RestfulTestCase, """ group_id = uuid.uuid4().hex - collection_url = ( - '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': self.domain_id, - 'group_id': group_id}) + collection_url = '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { + 'domain_id': self.domain_id, + 'group_id': group_id, + } member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } self.put(member_url, expected_status=http.client.NOT_FOUND) self.head(member_url, expected_status=http.client.NOT_FOUND) @@ -330,13 +342,14 @@ class AssignmentTestCase(test_v3.RestfulTestCase, new_user = unit.new_user_ref(domain_id=self.domain_id) user_ref = PROVIDERS.identity_api.create_user(new_user) # Assign the user a role on the project - collection_url = ( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.project_id, - 'user_id': user_ref['id']}) - member_url = ('%(collection_url)s/%(role_id)s' % { + collection_url = '/projects/%(project_id)s/users/%(user_id)s/roles' % { + 'project_id': self.project_id, + 'user_id': user_ref['id'], + } + member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id}) + 'role_id': self.role_id, + } self.put(member_url) # Check the user has the role assigned self.head(member_url) @@ -364,12 +377,13 @@ class AssignmentTestCase(test_v3.RestfulTestCase, # Assign the user a role on the project 
collection_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/roles' % { - 'project_id': self.project_id, - 'group_id': group_ref['id']}) - member_url = ('%(collection_url)s/%(role_id)s' % { + '/projects/%(project_id)s/groups/%(group_id)s/roles' + % {'project_id': self.project_id, 'group_id': group_ref['id']} + ) + member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id}) + 'role_id': self.role_id, + } self.put(member_url) # Check the user has the role assigned @@ -387,10 +401,10 @@ class AssignmentTestCase(test_v3.RestfulTestCase, def test_delete_user_before_removing_system_assignments_succeeds(self): system_role = self._create_new_role() user = self._create_user() - path = ( - '/system/users/%(user_id)s/roles/%(role_id)s' % - {'user_id': user['id'], 'role_id': system_role} - ) + path = '/system/users/%(user_id)s/roles/%(role_id)s' % { + 'user_id': user['id'], + 'role_id': system_role, + } self.put(path) response = self.get('/role_assignments') @@ -429,8 +443,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, with freezegun.freeze_time(time) as frozen_datetime: # creates grant from group on project. PROVIDERS.assignment_api.create_grant( - role_id=self.role['id'], project_id=self.project['id'], - group_id=self.group['id'] + role_id=self.role['id'], + project_id=self.project['id'], + group_id=self.group['id'], ) # adds user to the group. @@ -442,38 +457,47 @@ class AssignmentTestCase(test_v3.RestfulTestCase, auth_body = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) token_resp = self.post('/auth/tokens', body=auth_body) token = token_resp.headers.get('x-subject-token') # validates the returned token; it should be valid. 
- self.head('/auth/tokens', - headers={'x-subject-token': token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'x-subject-token': token}, + expected_status=http.client.OK, + ) frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) # revokes the grant from group on project. PROVIDERS.assignment_api.delete_grant( - role_id=self.role['id'], project_id=self.project['id'], - group_id=self.group['id']) + role_id=self.role['id'], + project_id=self.project['id'], + group_id=self.group['id'], + ) # revokes the direct role form user on project PROVIDERS.assignment_api.delete_grant( - role_id=self.role['id'], project_id=self.project['id'], - user_id=self.user['id'] + role_id=self.role['id'], + project_id=self.project['id'], + user_id=self.user['id'], ) frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) # validates the same token again; it should not longer be valid. - self.head('/auth/tokens', token=token, - expected_status=http.client.UNAUTHORIZED) + self.head( + '/auth/tokens', + token=token, + expected_status=http.client.UNAUTHORIZED, + ) def test_delete_group_before_removing_system_assignments_succeeds(self): system_role = self._create_new_role() group = self._create_group() - path = ( - '/system/groups/%(group_id)s/roles/%(role_id)s' % - {'group_id': group['id'], 'role_id': system_role} - ) + path = '/system/groups/%(group_id)s/roles/%(role_id)s' % { + 'group_id': group['id'], + 'role_id': system_role, + } self.put(path) response = self.get('/role_assignments') @@ -496,13 +520,14 @@ class AssignmentTestCase(test_v3.RestfulTestCase, new_project = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project(new_project['id'], new_project) - collection_url = ( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': new_project['id'], - 'user_id': self.user['id']}) + collection_url = '/projects/%(project_id)s/users/%(user_id)s/roles' % { + 'project_id': new_project['id'], + 'user_id': 
self.user['id'], + } member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } # create the user a grant on the new project self.put(member_url) @@ -511,8 +536,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) resp = self.get(collection_url) - self.assertValidRoleListResponse(resp, ref=self.role, - resource_url=collection_url) + self.assertValidRoleListResponse( + resp, ref=self.role, resource_url=collection_url + ) # delete the grant self.delete(member_url) @@ -527,13 +553,14 @@ class AssignmentTestCase(test_v3.RestfulTestCase, new_domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain) - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': new_domain['id'], - 'user_id': self.user['id']}) + collection_url = '/domains/%(domain_id)s/users/%(user_id)s/roles' % { + 'domain_id': new_domain['id'], + 'user_id': self.user['id'], + } member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } # create the user a grant on the new domain self.put(member_url) @@ -542,8 +569,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) resp = self.get(collection_url) - self.assertValidRoleListResponse(resp, ref=self.role, - resource_url=collection_url) + self.assertValidRoleListResponse( + resp, ref=self.role, resource_url=collection_url + ) # delete the grant self.delete(member_url) @@ -559,12 +587,13 @@ class AssignmentTestCase(test_v3.RestfulTestCase, PROVIDERS.resource_api.create_project(new_project['id'], new_project) collection_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/roles' % { - 'project_id': new_project['id'], - 'group_id': self.group['id']}) + 
'/projects/%(project_id)s/groups/%(group_id)s/roles' + % {'project_id': new_project['id'], 'group_id': self.group['id']} + ) member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } # create the group a grant on the new project self.put(member_url) @@ -573,8 +602,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) resp = self.get(collection_url) - self.assertValidRoleListResponse(resp, ref=self.role, - resource_url=collection_url) + self.assertValidRoleListResponse( + resp, ref=self.role, resource_url=collection_url + ) # delete the grant self.delete(member_url) @@ -589,13 +619,14 @@ class AssignmentTestCase(test_v3.RestfulTestCase, new_domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(new_domain['id'], new_domain) - collection_url = ( - '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': new_domain['id'], - 'group_id': self.group['id']}) + collection_url = '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { + 'domain_id': new_domain['id'], + 'group_id': self.group['id'], + } member_url = '%(collection_url)s/%(role_id)s' % { 'collection_url': collection_url, - 'role_id': self.role_id} + 'role_id': self.role_id, + } # create the group a grant on the new domain self.put(member_url) @@ -604,8 +635,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) resp = self.get(collection_url) - self.assertValidRoleListResponse(resp, ref=self.role, - resource_url=collection_url) + self.assertValidRoleListResponse( + resp, ref=self.role, resource_url=collection_url + ) # delete the grant self.delete(member_url) @@ -656,7 +688,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase, collection_url = '/role_assignments' r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( - r, 
resource_url=collection_url) + r, resource_url=collection_url + ) self.head(collection_url, expected_status=http.client.OK) existing_assignments = len(r.result.get('role_assignments')) @@ -665,50 +698,60 @@ class AssignmentTestCase(test_v3.RestfulTestCase, gd_entity = self.build_role_assignment_entity( domain_id=self.domain_id, group_id=self.group_id, - role_id=role['id']) + role_id=role['id'], + ) self.put(gd_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 1, - resource_url=collection_url) + resource_url=collection_url, + ) self.assertRoleAssignmentInListResponse(r, gd_entity) self.head(collection_url, expected_status=http.client.OK) ud_entity = self.build_role_assignment_entity( domain_id=self.domain_id, user_id=user1['id'], - role_id=role['id']) + role_id=role['id'], + ) self.put(ud_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, - resource_url=collection_url) + resource_url=collection_url, + ) self.assertRoleAssignmentInListResponse(r, ud_entity) self.head(collection_url, expected_status=http.client.OK) gp_entity = self.build_role_assignment_entity( - project_id=self.project_id, group_id=self.group_id, - role_id=role['id']) + project_id=self.project_id, + group_id=self.group_id, + role_id=role['id'], + ) self.put(gp_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 3, - resource_url=collection_url) + resource_url=collection_url, + ) self.assertRoleAssignmentInListResponse(r, gp_entity) self.head(collection_url, expected_status=http.client.OK) up_entity = self.build_role_assignment_entity( - project_id=self.project_id, user_id=user1['id'], - role_id=role['id']) + project_id=self.project_id, + user_id=user1['id'], + role_id=role['id'], + ) 
self.put(up_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 4, - resource_url=collection_url) + resource_url=collection_url, + ) self.assertRoleAssignmentInListResponse(r, up_entity) self.head(collection_url, expected_status=http.client.OK) @@ -724,7 +767,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments, - resource_url=collection_url) + resource_url=collection_url, + ) self.assertRoleAssignmentNotInListResponse(r, gd_entity) self.assertRoleAssignmentNotInListResponse(r, ud_entity) self.assertRoleAssignmentNotInListResponse(r, gp_entity) @@ -745,29 +789,35 @@ class AssignmentTestCase(test_v3.RestfulTestCase, for each of the group members. """ - user1 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain['id']) - user2 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain['id']) + user1 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain['id'] + ) + user2 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain['id'] + ) PROVIDERS.identity_api.add_user_to_group(user1['id'], self.group['id']) PROVIDERS.identity_api.add_user_to_group(user2['id'], self.group['id']) collection_url = '/role_assignments' r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, resource_url=collection_url + ) existing_assignments = len(r.result.get('role_assignments')) - gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id, - group_id=self.group_id, - role_id=self.role_id) + gd_entity = self.build_role_assignment_entity( + domain_id=self.domain_id, + group_id=self.group_id, + role_id=self.role_id, + ) self.put(gd_entity['links']['assignment']) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( r, 
expected_length=existing_assignments + 1, - resource_url=collection_url) + resource_url=collection_url, + ) self.assertRoleAssignmentInListResponse(r, gd_entity) # Now re-read the collection asking for effective roles - this @@ -778,14 +828,21 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, - resource_url=collection_url) + resource_url=collection_url, + ) ud_entity = self.build_role_assignment_entity( - link=gd_entity['links']['assignment'], domain_id=self.domain_id, - user_id=user1['id'], role_id=self.role_id) + link=gd_entity['links']['assignment'], + domain_id=self.domain_id, + user_id=user1['id'], + role_id=self.role_id, + ) self.assertRoleAssignmentInListResponse(r, ud_entity) ud_entity = self.build_role_assignment_entity( - link=gd_entity['links']['assignment'], domain_id=self.domain_id, - user_id=user2['id'], role_id=self.role_id) + link=gd_entity['links']['assignment'], + domain_id=self.domain_id, + user_id=user2['id'], + role_id=self.role_id, + ) self.assertRoleAssignmentInListResponse(r, ud_entity) def test_check_effective_values_for_role_assignments(self): @@ -814,10 +871,12 @@ class AssignmentTestCase(test_v3.RestfulTestCase, know if we are getting effective roles or not """ - user1 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain['id']) - user2 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain['id']) + user1 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain['id'] + ) + user2 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain['id'] + ) PROVIDERS.identity_api.add_user_to_group(user1['id'], self.group['id']) PROVIDERS.identity_api.add_user_to_group(user2['id'], self.group['id']) @@ -825,20 +884,24 @@ class AssignmentTestCase(test_v3.RestfulTestCase, collection_url = '/role_assignments' r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, 
expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, resource_url=collection_url + ) existing_assignments = len(r.result.get('role_assignments')) - gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id, - group_id=self.group_id, - role_id=self.role_id) + gd_entity = self.build_role_assignment_entity( + domain_id=self.domain_id, + group_id=self.group_id, + role_id=self.role_id, + ) self.put(gd_entity['links']['assignment']) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 1, - resource_url=collection_url) + resource_url=collection_url, + ) self.assertRoleAssignmentInListResponse(r, gd_entity) # Now re-read the collection asking for effective roles, @@ -851,7 +914,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, - resource_url=collection_url) + resource_url=collection_url, + ) # Now set 'effective' to false explicitly - should get # back the regular roles collection_url = '/role_assignments?effective=0' @@ -860,7 +924,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 1, - resource_url=collection_url) + resource_url=collection_url, + ) # Now try setting 'effective' to 'False' explicitly- this is # NOT supported as a way of setting a query or filter # parameter to false by design. 
Hence we should get back @@ -871,7 +936,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, - resource_url=collection_url) + resource_url=collection_url, + ) # Now set 'effective' to True explicitly collection_url = '/role_assignments?effective=True' r = self.get(collection_url, expected_status=http.client.OK) @@ -879,7 +945,8 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertValidRoleAssignmentListResponse( r, expected_length=existing_assignments + 2, - resource_url=collection_url) + resource_url=collection_url, + ) def test_filtered_role_assignments(self): """Call ``GET /role_assignments?filters``. @@ -902,10 +969,12 @@ class AssignmentTestCase(test_v3.RestfulTestCase, # Since the default fixtures already assign some roles to the # user it creates, we also need a new user that will not have any # existing assignments - user1 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain['id']) - user2 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain['id']) + user1 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain['id'] + ) + user2 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain['id'] + ) group1 = unit.new_group_ref(domain_id=self.domain['id']) group1 = PROVIDERS.identity_api.create_group(group1) @@ -921,89 +990,94 @@ class AssignmentTestCase(test_v3.RestfulTestCase, # Now add one of each of the six types of assignment gd_entity = self.build_role_assignment_entity( - domain_id=self.domain_id, group_id=group1['id'], - role_id=self.role1['id']) + domain_id=self.domain_id, + group_id=group1['id'], + role_id=self.role1['id'], + ) self.put(gd_entity['links']['assignment']) - ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id, - user_id=user1['id'], - role_id=self.role2['id']) + ud_entity = self.build_role_assignment_entity( + domain_id=self.domain_id, + user_id=user1['id'], + 
role_id=self.role2['id'], + ) self.put(ud_entity['links']['assignment']) gp_entity = self.build_role_assignment_entity( project_id=project1['id'], group_id=group1['id'], - role_id=self.role1['id']) + role_id=self.role1['id'], + ) self.put(gp_entity['links']['assignment']) up_entity = self.build_role_assignment_entity( project_id=project1['id'], user_id=user1['id'], - role_id=self.role2['id']) + role_id=self.role2['id'], + ) self.put(up_entity['links']['assignment']) gs_entity = self.build_role_assignment_entity( - system='all', - group_id=group1['id'], - role_id=self.role1['id']) + system='all', group_id=group1['id'], role_id=self.role1['id'] + ) self.put(gs_entity['links']['assignment']) us_entity = self.build_role_assignment_entity( - system='all', - user_id=user1['id'], - role_id=self.role2['id']) + system='all', user_id=user1['id'], role_id=self.role2['id'] + ) self.put(us_entity['links']['assignment']) us2_entity = self.build_role_assignment_entity( - system='all', - user_id=user2['id'], - role_id=self.role2['id']) + system='all', user_id=user2['id'], role_id=self.role2['id'] + ) self.put(us2_entity['links']['assignment']) # Now list by various filters to make sure we get back the right ones - collection_url = ('/role_assignments?scope.project.id=%s' % - project1['id']) + collection_url = ( + '/role_assignments?scope.project.id=%s' % project1['id'] + ) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=2, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, gp_entity) - collection_url = ('/role_assignments?scope.domain.id=%s' % - self.domain['id']) + collection_url = ( + '/role_assignments?scope.domain.id=%s' % self.domain['id'] + ) r = 
self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=2, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(r, ud_entity) self.assertRoleAssignmentInListResponse(r, gd_entity) collection_url = '/role_assignments?user.id=%s' % user1['id'] r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=3, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, ud_entity) collection_url = '/role_assignments?group.id=%s' % group1['id'] r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=3, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(r, gd_entity) self.assertRoleAssignmentInListResponse(r, gp_entity) collection_url = '/role_assignments?role.id=%s' % self.role1['id'] r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=3, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(r, gd_entity) self.assertRoleAssignmentInListResponse(r, gp_entity) self.assertRoleAssignmentInListResponse(r, gs_entity) @@ -1011,9 +1085,9 @@ class 
AssignmentTestCase(test_v3.RestfulTestCase, collection_url = '/role_assignments?role.id=%s' % self.role2['id'] r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=4, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=4, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(r, ud_entity) self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, us_entity) @@ -1022,27 +1096,26 @@ class AssignmentTestCase(test_v3.RestfulTestCase, collection_url = ( '/role_assignments?user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) + '&scope.project.id=%(project_id)s' + % {'user_id': user1['id'], 'project_id': project1['id']} + ) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=1, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=1, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(r, up_entity) # Now for a harder one - filter for user with effective # roles - this should return role assignment that were directly # assigned as well as by virtue of group membership - collection_url = ('/role_assignments?effective&user.id=%s' % - user1['id']) + collection_url = '/role_assignments?effective&user.id=%s' % user1['id'] r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=4, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=4, resource_url=collection_url + ) # Should have the two direct roles... 
self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, ud_entity) @@ -1050,17 +1123,26 @@ class AssignmentTestCase(test_v3.RestfulTestCase, gp1_link = self.build_role_assignment_link( project_id=project1['id'], group_id=group1['id'], - role_id=self.role1['id']) - gd1_link = self.build_role_assignment_link(domain_id=self.domain_id, - group_id=group1['id'], - role_id=self.role1['id']) + role_id=self.role1['id'], + ) + gd1_link = self.build_role_assignment_link( + domain_id=self.domain_id, + group_id=group1['id'], + role_id=self.role1['id'], + ) up1_entity = self.build_role_assignment_entity( - link=gp1_link, project_id=project1['id'], - user_id=user1['id'], role_id=self.role1['id']) + link=gp1_link, + project_id=project1['id'], + user_id=user1['id'], + role_id=self.role1['id'], + ) ud1_entity = self.build_role_assignment_entity( - link=gd1_link, domain_id=self.domain_id, user_id=user1['id'], - role_id=self.role1['id']) + link=gd1_link, + domain_id=self.domain_id, + user_id=user1['id'], + role_id=self.role1['id'], + ) self.assertRoleAssignmentInListResponse(r, up1_entity) self.assertRoleAssignmentInListResponse(r, ud1_entity) @@ -1070,14 +1152,14 @@ class AssignmentTestCase(test_v3.RestfulTestCase, collection_url = ( '/role_assignments?effective&user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) + '&scope.project.id=%(project_id)s' + % {'user_id': user1['id'], 'project_id': project1['id']} + ) r = self.get(collection_url, expected_status=http.client.OK) self.head(collection_url, expected_status=http.client.OK) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=2, resource_url=collection_url + ) # Should have one direct role and one from group membership... 
self.assertRoleAssignmentInListResponse(r, up_entity) self.assertRoleAssignmentInListResponse(r, up1_entity) @@ -1097,11 +1179,15 @@ class AssignmentTestCase(test_v3.RestfulTestCase, url = '/system/users/%s/roles/%s' % (user['id'], user_system_role_id) self.put(url) url = '/domains/%s/users/%s/roles/%s' % ( - self.domain_id, user['id'], user_domain_role_id + self.domain_id, + user['id'], + user_domain_role_id, ) self.put(url) url = '/projects/%s/users/%s/roles/%s' % ( - self.project_id, user['id'], user_project_role_id + self.project_id, + user['id'], + user_project_role_id, ) self.put(url) @@ -1109,15 +1195,20 @@ class AssignmentTestCase(test_v3.RestfulTestCase, # project group = self._create_group() url = '/system/groups/%s/roles/%s' % ( - group['id'], group_system_role_id + group['id'], + group_system_role_id, ) self.put(url) url = '/domains/%s/groups/%s/roles/%s' % ( - self.domain_id, group['id'], group_domain_role_id + self.domain_id, + group['id'], + group_domain_role_id, ) self.put(url) url = '/projects/%s/groups/%s/roles/%s' % ( - self.project_id, group['id'], group_project_role_id + self.project_id, + group['id'], + group_project_role_id, ) self.put(url) @@ -1130,8 +1221,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertEqual(user_system_role_id, assignment['role']['id']) if assignment.get('group'): self.assertEqual( - group_system_role_id, - assignment['role']['id'] + group_system_role_id, assignment['role']['id'] ) # /v3/role_assignments?scope_system=all&user.id=$USER_ID should return @@ -1141,7 +1231,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertValidRoleAssignmentListResponse(response, expected_length=1) self.assertEqual( user_system_role_id, - response.json_body['role_assignments'][0]['role']['id'] + response.json_body['role_assignments'][0]['role']['id'], ) # /v3/role_assignments?scope_system=all&group.id=$GROUP_ID should @@ -1151,7 +1241,7 @@ class AssignmentTestCase(test_v3.RestfulTestCase, 
self.assertValidRoleAssignmentListResponse(response, expected_length=1) self.assertEqual( group_system_role_id, - response.json_body['role_assignments'][0]['role']['id'] + response.json_body['role_assignments'][0]['role']['id'], ) # /v3/role_assignments?user.id=$USER_ID should return 3 assignments @@ -1161,13 +1251,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, self.assertValidRoleAssignmentListResponse(response, expected_length=3) for assignment in response.json_body['role_assignments']: if 'system' in assignment['scope']: - self.assertEqual( - user_system_role_id, assignment['role']['id'] - ) + self.assertEqual(user_system_role_id, assignment['role']['id']) if 'domain' in assignment['scope']: - self.assertEqual( - user_domain_role_id, assignment['role']['id'] - ) + self.assertEqual(user_domain_role_id, assignment['role']['id']) if 'project' in assignment['scope']: self.assertEqual( user_project_role_id, assignment['role']['id'] @@ -1193,8 +1279,9 @@ class AssignmentTestCase(test_v3.RestfulTestCase, ) -class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin): +class RoleAssignmentBaseTestCase( + test_v3.RestfulTestCase, test_v3.AssignmentTestMixin +): """Base class for testing /v3/role_assignments API behavior.""" MAX_HIERARCHY_BREADTH = 3 @@ -1207,6 +1294,7 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase, hierarchy and 3 users within 3 groups. 
""" + def create_project_hierarchy(parent_id, depth): """Create a random project hierarchy.""" if depth == 0: @@ -1216,8 +1304,11 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase, subprojects = [] for i in range(breadth): - subprojects.append(unit.new_project_ref( - domain_id=self.domain_id, parent_id=parent_id)) + subprojects.append( + unit.new_project_ref( + domain_id=self.domain_id, parent_id=parent_id + ) + ) PROVIDERS.resource_api.create_project( subprojects[-1]['id'], subprojects[-1] ) @@ -1238,8 +1329,9 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase, PROVIDERS.resource_api.create_project(self.project_id, self.project) # Create a random project hierarchy - create_project_hierarchy(self.project_id, - random.randint(1, self.MAX_HIERARCHY_DEPTH)) + create_project_hierarchy( + self.project_id, random.randint(1, self.MAX_HIERARCHY_DEPTH) + ) # Create 3 users self.user_ids = [] @@ -1264,8 +1356,9 @@ class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase, ) PROVIDERS.assignment_api.create_grant( - user_id=self.user_id, project_id=self.project_id, - role_id=self.role_id + user_id=self.user_id, + project_id=self.project_id, + role_id=self.role_id, ) # Create a role @@ -1319,24 +1412,33 @@ class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase): """ def test_get_role_assignments_by_domain_and_project(self): - self.get_role_assignments(domain_id=self.domain_id, - project_id=self.project_id, - expected_status=http.client.BAD_REQUEST) + self.get_role_assignments( + domain_id=self.domain_id, + project_id=self.project_id, + expected_status=http.client.BAD_REQUEST, + ) def test_get_role_assignments_by_user_and_group(self): - self.get_role_assignments(user_id=self.default_user_id, - group_id=self.default_group_id, - expected_status=http.client.BAD_REQUEST) + self.get_role_assignments( + user_id=self.default_user_id, + group_id=self.default_group_id, + expected_status=http.client.BAD_REQUEST, + ) def 
test_get_role_assignments_by_effective_and_inherited(self): - self.get_role_assignments(domain_id=self.domain_id, effective=True, - inherited_to_projects=True, - expected_status=http.client.BAD_REQUEST) + self.get_role_assignments( + domain_id=self.domain_id, + effective=True, + inherited_to_projects=True, + expected_status=http.client.BAD_REQUEST, + ) def test_get_role_assignments_by_effective_and_group(self): - self.get_role_assignments(effective=True, - group_id=self.default_group_id, - expected_status=http.client.BAD_REQUEST) + self.get_role_assignments( + effective=True, + group_id=self.default_group_id, + expected_status=http.client.BAD_REQUEST, + ) class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase): @@ -1372,14 +1474,18 @@ class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase): # Get expected role assignments expected_assignments = self._list_expected_role_assignments( - **test_assignment) + **test_assignment + ) # Get role assignments from API response, query_url = self.get_role_assignments(**test_assignment) - self.assertValidRoleAssignmentListResponse(response, - resource_url=query_url) - self.assertEqual(len(expected_assignments), - len(response.result.get('role_assignments'))) + self.assertValidRoleAssignmentListResponse( + response, resource_url=query_url + ) + self.assertEqual( + len(expected_assignments), + len(response.result.get('role_assignments')), + ) # Assert that expected role assignments were returned by the API call for assignment in expected_assignments: @@ -1399,8 +1505,9 @@ class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase): and inherited_to_projects. 
""" - if not any(target in attribs - for target in ('domain_id', 'projects_id')): + if not any( + target in attribs for target in ('domain_id', 'projects_id') + ): attribs['project_id'] = self.project_id if not any(actor in attribs for actor in ('user_id', 'group_id')): @@ -1436,47 +1543,57 @@ class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase): self._test_get_role_assignments(project_id=self.project_id, **filters) def test_get_role_assignments_by_user(self, **filters): - self._test_get_role_assignments(user_id=self.default_user_id, - **filters) + self._test_get_role_assignments( + user_id=self.default_user_id, **filters + ) def test_get_role_assignments_by_group(self, **filters): - self._test_get_role_assignments(group_id=self.default_group_id, - **filters) + self._test_get_role_assignments( + group_id=self.default_group_id, **filters + ) def test_get_role_assignments_by_role(self, **filters): self._test_get_role_assignments(role_id=self.role_id, **filters) def test_get_role_assignments_by_domain_and_user(self, **filters): - self.test_get_role_assignments_by_domain(user_id=self.default_user_id, - **filters) + self.test_get_role_assignments_by_domain( + user_id=self.default_user_id, **filters + ) def test_get_role_assignments_by_domain_and_group(self, **filters): self.test_get_role_assignments_by_domain( - group_id=self.default_group_id, **filters) + group_id=self.default_group_id, **filters + ) def test_get_role_assignments_by_project_and_user(self, **filters): - self.test_get_role_assignments_by_project(user_id=self.default_user_id, - **filters) + self.test_get_role_assignments_by_project( + user_id=self.default_user_id, **filters + ) def test_get_role_assignments_by_project_and_group(self, **filters): self.test_get_role_assignments_by_project( - group_id=self.default_group_id, **filters) + group_id=self.default_group_id, **filters + ) def test_get_role_assignments_by_domain_user_and_role(self, **filters): - 
self.test_get_role_assignments_by_domain_and_user(role_id=self.role_id, - **filters) + self.test_get_role_assignments_by_domain_and_user( + role_id=self.role_id, **filters + ) def test_get_role_assignments_by_domain_group_and_role(self, **filters): self.test_get_role_assignments_by_domain_and_group( - role_id=self.role_id, **filters) + role_id=self.role_id, **filters + ) def test_get_role_assignments_by_project_user_and_role(self, **filters): self.test_get_role_assignments_by_project_and_user( - role_id=self.role_id, **filters) + role_id=self.role_id, **filters + ) def test_get_role_assignments_by_project_group_and_role(self, **filters): self.test_get_role_assignments_by_project_and_group( - role_id=self.role_id, **filters) + role_id=self.role_id, **filters + ) class RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase): @@ -1492,9 +1609,9 @@ class RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase): def _test_get_role_assignments(self, **filters): """Add inherited_to_project filter to expected entity in tests.""" - super(RoleAssignmentInheritedTestCase, - self)._test_get_role_assignments(inherited_to_projects=True, - **filters) + super( + RoleAssignmentInheritedTestCase, self + )._test_get_role_assignments(inherited_to_projects=True, **filters) class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase): @@ -1529,8 +1646,9 @@ class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase): query_filters.pop('domain_id', None) query_filters.pop('project_id', None) - return self.build_role_assignment_query_url(effective=True, - **query_filters) + return self.build_role_assignment_query_url( + effective=True, **query_filters + ) def _list_expected_role_assignments(self, **filters): """Given the filters, it returns expected direct role assignments. 
@@ -1549,22 +1667,31 @@ class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase): # Expand group membership user_ids = [None] if filters.get('group_id'): - user_ids = [user['id'] for user in - PROVIDERS.identity_api.list_users_in_group( - filters['group_id'])] + user_ids = [ + user['id'] + for user in PROVIDERS.identity_api.list_users_in_group( + filters['group_id'] + ) + ] else: user_ids = [self.default_user_id] # Expand role inheritance project_ids = [None] if filters.get('domain_id'): - project_ids = [project['id'] for project in - PROVIDERS.resource_api.list_projects_in_domain( - filters.pop('domain_id'))] + project_ids = [ + project['id'] + for project in PROVIDERS.resource_api.list_projects_in_domain( + filters.pop('domain_id') + ) + ] else: - project_ids = [project['id'] for project in - PROVIDERS.resource_api.list_projects_in_subtree( - self.project_id)] + project_ids = [ + project['id'] + for project in PROVIDERS.resource_api.list_projects_in_subtree( + self.project_id + ) + ] # Compute expected role assignments assignments = [] @@ -1572,14 +1699,18 @@ class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase): filters['project_id'] = project_id for user_id in user_ids: filters['user_id'] = user_id - assignments.append(self.build_role_assignment_entity( - link=assignment_link, **filters)) + assignments.append( + self.build_role_assignment_entity( + link=assignment_link, **filters + ) + ) return assignments -class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin): +class AssignmentInheritanceTestCase( + test_v3.RestfulTestCase, test_v3.AssignmentTestMixin +): """Test inheritance crud and its effects.""" def test_get_token_from_inherited_user_domain_role_grants(self): @@ -1592,27 +1723,33 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, domain_auth_data = self.build_authentication_request( user_id=user['id'], password=user['password'], - domain_id=self.domain_id) + 
domain_id=self.domain_id, + ) project_auth_data = self.build_authentication_request( user_id=user['id'], password=user['password'], - project_id=self.project_id) + project_id=self.project_id, + ) # Check the user cannot get a domain nor a project token - self.v3_create_token(domain_auth_data, - expected_status=http.client.UNAUTHORIZED) - self.v3_create_token(project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + domain_auth_data, expected_status=http.client.UNAUTHORIZED + ) + self.v3_create_token( + project_auth_data, expected_status=http.client.UNAUTHORIZED + ) # Grant non-inherited role for user on domain non_inher_ud_link = self.build_role_assignment_link( - domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id) + domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id + ) self.put(non_inher_ud_link) # Check the user can get only a domain token self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + project_auth_data, expected_status=http.client.UNAUTHORIZED + ) # Create inherited role inherited_role = unit.new_role_ref(name='inherited') @@ -1620,8 +1757,11 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Grant inherited role for user on domain inher_ud_link = self.build_role_assignment_link( - domain_id=self.domain_id, user_id=user['id'], - role_id=inherited_role['id'], inherited_to_projects=True) + domain_id=self.domain_id, + user_id=user['id'], + role_id=inherited_role['id'], + inherited_to_projects=True, + ) self.put(inher_ud_link) # Check the user can get both a domain and a project token @@ -1633,15 +1773,17 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Check the user can only get a domain token self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + project_auth_data, 
expected_status=http.client.UNAUTHORIZED + ) # Delete non-inherited grant self.delete(non_inher_ud_link) # Check the user cannot get a domain token anymore - self.v3_create_token(domain_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + domain_auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_get_token_from_inherited_group_domain_role_grants(self): # Create a new group and put a new user in it to @@ -1658,27 +1800,33 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, domain_auth_data = self.build_authentication_request( user_id=user['id'], password=user['password'], - domain_id=self.domain_id) + domain_id=self.domain_id, + ) project_auth_data = self.build_authentication_request( user_id=user['id'], password=user['password'], - project_id=self.project_id) + project_id=self.project_id, + ) # Check the user cannot get a domain nor a project token - self.v3_create_token(domain_auth_data, - expected_status=http.client.UNAUTHORIZED) - self.v3_create_token(project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + domain_auth_data, expected_status=http.client.UNAUTHORIZED + ) + self.v3_create_token( + project_auth_data, expected_status=http.client.UNAUTHORIZED + ) # Grant non-inherited role for user on domain non_inher_gd_link = self.build_role_assignment_link( - domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id) + domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id + ) self.put(non_inher_gd_link) # Check the user can get only a domain token self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + project_auth_data, expected_status=http.client.UNAUTHORIZED + ) # Create inherited role inherited_role = unit.new_role_ref(name='inherited') @@ -1686,8 +1834,11 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Grant inherited role for user on domain 
inher_gd_link = self.build_role_assignment_link( - domain_id=self.domain_id, user_id=user['id'], - role_id=inherited_role['id'], inherited_to_projects=True) + domain_id=self.domain_id, + user_id=user['id'], + role_id=inherited_role['id'], + inherited_to_projects=True, + ) self.put(inher_gd_link) # Check the user can get both a domain and a project token @@ -1699,15 +1850,17 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Check the user can only get a domain token self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + project_auth_data, expected_status=http.client.UNAUTHORIZED + ) # Delete non-inherited grant self.delete(non_inher_gd_link) # Check the user cannot get a domain token anymore - self.v3_create_token(domain_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + domain_auth_data, expected_status=http.client.UNAUTHORIZED + ) def _test_crud_inherited_and_direct_assignment_on_target(self, target_url): time = datetime.datetime.utcnow() @@ -1718,9 +1871,13 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Define URLs direct_url = '%s/users/%s/roles/%s' % ( - target_url, self.user_id, role['id']) - inherited_url = ('/OS-INHERIT/%s/inherited_to_projects' % - direct_url.lstrip('/')) + target_url, + self.user_id, + role['id'], + ) + inherited_url = ( + '/OS-INHERIT/%s/inherited_to_projects' % direct_url.lstrip('/') + ) # Create the direct assignment self.put(direct_url) @@ -1751,11 +1908,13 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, def test_crud_inherited_and_direct_assignment_on_domains(self): self._test_crud_inherited_and_direct_assignment_on_target( - '/domains/%s' % self.domain_id) + '/domains/%s' % self.domain_id + ) def test_crud_inherited_and_direct_assignment_on_projects(self): self._test_crud_inherited_and_direct_assignment_on_target( - '/projects/%s' % self.project_id) + 
'/projects/%s' % self.project_id + ) def test_crud_user_inherited_domain_role_grants(self): role_list = [] @@ -1766,16 +1925,19 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Create a non-inherited role as a spoiler PROVIDERS.assignment_api.create_grant( - role_list[1]['id'], user_id=self.user['id'], - domain_id=self.domain_id) + role_list[1]['id'], + user_id=self.user['id'], + domain_id=self.domain_id, + ) base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, - 'user_id': self.user['id']}) + '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' + % {'domain_id': self.domain_id, 'user_id': self.user['id']} + ) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, - 'role_id': role_list[0]['id']} + 'role_id': role_list[0]['id'], + } collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) @@ -1784,14 +1946,16 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[0], - resource_url=collection_url) + self.assertValidRoleListResponse( + r, ref=role_list[0], resource_url=collection_url + ) # Now delete and check its gone self.delete(member_url) r = self.get(collection_url) - self.assertValidRoleListResponse(r, expected_length=0, - resource_url=collection_url) + self.assertValidRoleListResponse( + r, expected_length=0, resource_url=collection_url + ) def test_list_role_assignments_for_inherited_domain_grants(self): """Call ``GET /role_assignments with inherited domain grants``. 
@@ -1825,44 +1989,52 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, PROVIDERS.resource_api.create_project(project2['id'], project2) # Add some roles to the project PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[0]['id']) + user1['id'], project1['id'], role_list[0]['id'] + ) PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[1]['id']) + user1['id'], project1['id'], role_list[1]['id'] + ) # ..and one on a different project as a spoiler PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project2['id'], role_list[2]['id']) + user1['id'], project2['id'], role_list[2]['id'] + ) # Now create our inherited role on the domain base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': domain['id'], - 'user_id': user1['id']}) + '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' + % {'domain_id': domain['id'], 'user_id': user1['id']} + ) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, - 'role_id': role_list[3]['id']} + 'role_id': role_list[3]['id'], + } collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[3], - resource_url=collection_url) + self.assertValidRoleListResponse( + r, ref=role_list[3], resource_url=collection_url + ) # Now use the list domain role assignments api to check if this # is included collection_url = ( '/role_assignments?user.id=%(user_id)s' - '&scope.domain.id=%(domain_id)s' % { - 'user_id': user1['id'], - 'domain_id': domain['id']}) + '&scope.domain.id=%(domain_id)s' + % {'user_id': user1['id'], 'domain_id': domain['id']} + ) r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - 
expected_length=1, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=1, resource_url=collection_url + ) ud_entity = self.build_role_assignment_entity( - domain_id=domain['id'], user_id=user1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) + domain_id=domain['id'], + user_id=user1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, + ) self.assertRoleAssignmentInListResponse(r, ud_entity) # Now ask for effective list role assignments - the role should @@ -1870,22 +2042,28 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # on the project collection_url = ( '/role_assignments?effective&user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) + '&scope.project.id=%(project_id)s' + % {'user_id': user1['id'], 'project_id': project1['id']} + ) r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=3, resource_url=collection_url + ) # An effective role for an inherited role will be a project # entity, with a domain link to the inherited assignment ud_url = self.build_role_assignment_link( - domain_id=domain['id'], user_id=user1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) + domain_id=domain['id'], + user_id=user1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, + ) up_entity = self.build_role_assignment_entity( - link=ud_url, project_id=project1['id'], - user_id=user1['id'], role_id=role_list[3]['id'], - inherited_to_projects=True) + link=ud_url, + project_id=project1['id'], + user_id=user1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, + ) self.assertRoleAssignmentInListResponse(r, up_entity) def _test_list_role_assignments_include_names(self, role1): @@ -1908,46 +2086,46 @@ class 
AssignmentInheritanceTestCase(test_v3.RestfulTestCase, PROVIDERS.resource_api.create_project(project1['id'], project1) expected_entity1 = self.build_role_assignment_entity_include_names( - role_ref=role1, - project_ref=project1, - user_ref=user1) + role_ref=role1, project_ref=project1, user_ref=user1 + ) self.put(expected_entity1['links']['assignment']) expected_entity2 = self.build_role_assignment_entity_include_names( - role_ref=role1, - domain_ref=self.domain, - group_ref=group) + role_ref=role1, domain_ref=self.domain, group_ref=group + ) self.put(expected_entity2['links']['assignment']) expected_entity3 = self.build_role_assignment_entity_include_names( - role_ref=role1, - domain_ref=self.domain, - user_ref=user1) + role_ref=role1, domain_ref=self.domain, user_ref=user1 + ) self.put(expected_entity3['links']['assignment']) expected_entity4 = self.build_role_assignment_entity_include_names( - role_ref=role1, - project_ref=project1, - group_ref=group) + role_ref=role1, project_ref=project1, group_ref=group + ) self.put(expected_entity4['links']['assignment']) collection_url_domain = ( - '/role_assignments?include_names&scope.domain.id=%(domain_id)s' % { - 'domain_id': self.domain_id}) + '/role_assignments?include_names&scope.domain.id=%(domain_id)s' + % {'domain_id': self.domain_id} + ) rs_domain = self.get(collection_url_domain) collection_url_project = ( '/role_assignments?include_names&' - 'scope.project.id=%(project_id)s' % { - 'project_id': project1['id']}) + 'scope.project.id=%(project_id)s' % {'project_id': project1['id']} + ) rs_project = self.get(collection_url_project) collection_url_group = ( - '/role_assignments?include_names&group.id=%(group_id)s' % { - 'group_id': group['id']}) + '/role_assignments?include_names&group.id=%(group_id)s' + % {'group_id': group['id']} + ) rs_group = self.get(collection_url_group) collection_url_user = ( - '/role_assignments?include_names&user.id=%(user_id)s' % { - 'user_id': user1['id']}) + 
'/role_assignments?include_names&user.id=%(user_id)s' + % {'user_id': user1['id']} + ) rs_user = self.get(collection_url_user) collection_url_role = ( - '/role_assignments?include_names&role.id=%(role_id)s' % { - 'role_id': role1['id']}) + '/role_assignments?include_names&role.id=%(role_id)s' + % {'role_id': role1['id']} + ) rs_role = self.get(collection_url_role) # Make sure all entities were created successfully self.assertEqual(http.client.OK, rs_domain.status_int) @@ -1956,25 +2134,20 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, self.assertEqual(http.client.OK, rs_user.status_int) # Make sure we can get back the correct number of entities self.assertValidRoleAssignmentListResponse( - rs_domain, - expected_length=2, - resource_url=collection_url_domain) + rs_domain, expected_length=2, resource_url=collection_url_domain + ) self.assertValidRoleAssignmentListResponse( - rs_project, - expected_length=2, - resource_url=collection_url_project) + rs_project, expected_length=2, resource_url=collection_url_project + ) self.assertValidRoleAssignmentListResponse( - rs_group, - expected_length=2, - resource_url=collection_url_group) + rs_group, expected_length=2, resource_url=collection_url_group + ) self.assertValidRoleAssignmentListResponse( - rs_user, - expected_length=2, - resource_url=collection_url_user) + rs_user, expected_length=2, resource_url=collection_url_user + ) self.assertValidRoleAssignmentListResponse( - rs_role, - expected_length=4, - resource_url=collection_url_role) + rs_role, expected_length=4, resource_url=collection_url_role + ) # Verify all types of entities have the correct format self.assertRoleAssignmentInListResponse(rs_domain, expected_entity2) self.assertRoleAssignmentInListResponse(rs_project, expected_entity1) @@ -2017,35 +2190,48 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id']) assignment_domain = self.build_role_assignment_entity( - 
role_id=role['id'], domain_id=domain['id'], user_id=user['id'], - inherited_to_projects=False) + role_id=role['id'], + domain_id=domain['id'], + user_id=user['id'], + inherited_to_projects=False, + ) assignment_project = self.build_role_assignment_entity( - role_id=role['id'], project_id=domain['id'], user_id=user['id'], - inherited_to_projects=False) + role_id=role['id'], + project_id=domain['id'], + user_id=user['id'], + inherited_to_projects=False, + ) self.put(assignment_domain['links']['assignment']) self.put(assignment_project['links']['assignment']) collection_url = '/role_assignments?user.id=%(user_id)s' % ( - {'user_id': user['id']}) + {'user_id': user['id']} + ) result = self.get(collection_url) # We have two role assignments based in both roles for the domain and # project scope self.assertValidRoleAssignmentListResponse( - result, expected_length=2, resource_url=collection_url) + result, expected_length=2, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(result, assignment_domain) domain_url = '/domains/%s/users/%s/roles/%s' % ( - domain['id'], user['id'], role['id']) + domain['id'], + user['id'], + role['id'], + ) self.delete(domain_url) collection_url = '/role_assignments?user.id=%(user_id)s' % ( - {'user_id': user['id']}) + {'user_id': user['id']} + ) result = self.get(collection_url) # Now we only have one assignment for the project scope since the # domain scope was removed. 
self.assertValidRoleAssignmentListResponse( - result, expected_length=1, resource_url=collection_url) + result, expected_length=1, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(result, assignment_project) def test_list_inherited_role_assignments_include_names(self): @@ -2069,34 +2255,49 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Create and store expected assignment refs assignment = self.build_role_assignment_entity( - role_id=role['id'], domain_id=domain['id'], user_id=user['id'], - inherited_to_projects=True) + role_id=role['id'], + domain_id=domain['id'], + user_id=user['id'], + inherited_to_projects=True, + ) assignment_names = self.build_role_assignment_entity_include_names( - role_ref=role, domain_ref=domain, user_ref=user, - inherited_assignment=True) + role_ref=role, + domain_ref=domain, + user_ref=user, + inherited_assignment=True, + ) # Ensure expected assignment refs are inherited and have the same URL - self.assertEqual('projects', - assignment['scope']['OS-INHERIT:inherited_to']) - self.assertEqual('projects', - assignment_names['scope']['OS-INHERIT:inherited_to']) - self.assertEqual(assignment['links']['assignment'], - assignment_names['links']['assignment']) + self.assertEqual( + 'projects', assignment['scope']['OS-INHERIT:inherited_to'] + ) + self.assertEqual( + 'projects', assignment_names['scope']['OS-INHERIT:inherited_to'] + ) + self.assertEqual( + assignment['links']['assignment'], + assignment_names['links']['assignment'], + ) self.put(assignment['links']['assignment']) collection_url = '/role_assignments?user.id=%(user_id)s' % ( - {'user_id': user['id']}) + {'user_id': user['id']} + ) result = self.get(collection_url) self.assertValidRoleAssignmentListResponse( - result, expected_length=1, resource_url=collection_url) + result, expected_length=1, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(result, assignment) - collection_url = ('/role_assignments?include_names&' - 
'user.id=%(user_id)s' % {'user_id': user['id']}) + collection_url = ( + '/role_assignments?include_names&' + 'user.id=%(user_id)s' % {'user_id': user['id']} + ) result = self.get(collection_url) self.assertValidRoleAssignmentListResponse( - result, expected_length=1, resource_url=collection_url) + result, expected_length=1, resource_url=collection_url + ) self.assertRoleAssignmentInListResponse(result, assignment_names) def test_list_role_assignments_for_disabled_inheritance_extension(self): @@ -2129,50 +2330,61 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, PROVIDERS.resource_api.create_project(project2['id'], project2) # Add some roles to the project PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[0]['id']) + user1['id'], project1['id'], role_list[0]['id'] + ) PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[1]['id']) + user1['id'], project1['id'], role_list[1]['id'] + ) # ..and one on a different project as a spoiler PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project2['id'], role_list[2]['id']) + user1['id'], project2['id'], role_list[2]['id'] + ) # Now create our inherited role on the domain base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': domain['id'], - 'user_id': user1['id']}) + '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' + % {'domain_id': domain['id'], 'user_id': user1['id']} + ) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, - 'role_id': role_list[3]['id']} + 'role_id': role_list[3]['id'], + } collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[3], - resource_url=collection_url) + 
self.assertValidRoleListResponse( + r, ref=role_list[3], resource_url=collection_url + ) # Get effective list role assignments - the role should # turn into a project role, along with the two direct roles that are # on the project collection_url = ( '/role_assignments?effective&user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) + '&scope.project.id=%(project_id)s' + % {'user_id': user1['id'], 'project_id': project1['id']} + ) r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=3, resource_url=collection_url + ) ud_url = self.build_role_assignment_link( - domain_id=domain['id'], user_id=user1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) + domain_id=domain['id'], + user_id=user1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, + ) up_entity = self.build_role_assignment_entity( - link=ud_url, project_id=project1['id'], - user_id=user1['id'], role_id=role_list[3]['id'], - inherited_to_projects=True) + link=ud_url, + project_id=project1['id'], + user_id=user1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, + ) self.assertRoleAssignmentInListResponse(r, up_entity) @@ -2207,56 +2419,60 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, ) group1 = unit.new_group_ref(domain_id=domain['id']) group1 = PROVIDERS.identity_api.create_group(group1) - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group1['id'] - ) - PROVIDERS.identity_api.add_user_to_group( - user2['id'], group1['id'] - ) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id']) + PROVIDERS.identity_api.add_user_to_group(user2['id'], group1['id']) project1 = unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) project2 = 
unit.new_project_ref(domain_id=domain['id']) PROVIDERS.resource_api.create_project(project2['id'], project2) # Add some roles to the project PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[0]['id']) + user1['id'], project1['id'], role_list[0]['id'] + ) PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[1]['id']) + user1['id'], project1['id'], role_list[1]['id'] + ) # ..and one on a different project as a spoiler PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project2['id'], role_list[2]['id']) + user1['id'], project2['id'], role_list[2]['id'] + ) # Now create our inherited role on the domain base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': domain['id'], - 'group_id': group1['id']}) + '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' + % {'domain_id': domain['id'], 'group_id': group1['id']} + ) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, - 'role_id': role_list[3]['id']} + 'role_id': role_list[3]['id'], + } collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[3], - resource_url=collection_url) + self.assertValidRoleListResponse( + r, ref=role_list[3], resource_url=collection_url + ) # Now use the list domain role assignments api to check if this # is included collection_url = ( '/role_assignments?group.id=%(group_id)s' - '&scope.domain.id=%(domain_id)s' % { - 'group_id': group1['id'], - 'domain_id': domain['id']}) + '&scope.domain.id=%(domain_id)s' + % {'group_id': group1['id'], 'domain_id': domain['id']} + ) r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=1, - 
resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=1, resource_url=collection_url + ) gd_entity = self.build_role_assignment_entity( - domain_id=domain['id'], group_id=group1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) + domain_id=domain['id'], + group_id=group1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, + ) self.assertRoleAssignmentInListResponse(r, gd_entity) # Now ask for effective list role assignments - the role should @@ -2264,19 +2480,22 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # that are on the project collection_url = ( '/role_assignments?effective&user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) + '&scope.project.id=%(project_id)s' + % {'user_id': user1['id'], 'project_id': project1['id']} + ) r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=3, resource_url=collection_url + ) # An effective role for an inherited role will be a project # entity, with a domain link to the inherited assignment up_entity = self.build_role_assignment_entity( - link=gd_entity['links']['assignment'], project_id=project1['id'], - user_id=user1['id'], role_id=role_list[3]['id'], - inherited_to_projects=True) + link=gd_entity['links']['assignment'], + project_id=project1['id'], + user_id=user1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, + ) self.assertRoleAssignmentInListResponse(r, up_entity) def test_filtered_role_assignments_for_inherited_grants(self): @@ -2312,61 +2531,75 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, PROVIDERS.resource_api.create_project(project2['id'], project2) # Add some spoiler roles to the projects PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], 
role_list[0]['id']) + user1['id'], project1['id'], role_list[0]['id'] + ) PROVIDERS.assignment_api.add_role_to_user_and_project( - user1['id'], project2['id'], role_list[1]['id']) + user1['id'], project2['id'], role_list[1]['id'] + ) # Create a non-inherited role as a spoiler PROVIDERS.assignment_api.create_grant( - role_list[2]['id'], user_id=user1['id'], domain_id=domain['id']) + role_list[2]['id'], user_id=user1['id'], domain_id=domain['id'] + ) # Now create two inherited roles on the domain, one for a user # and one for a domain base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': domain['id'], - 'user_id': user1['id']}) + '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' + % {'domain_id': domain['id'], 'user_id': user1['id']} + ) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, - 'role_id': role_list[3]['id']} + 'role_id': role_list[3]['id'], + } collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[3], - resource_url=collection_url) + self.assertValidRoleListResponse( + r, ref=role_list[3], resource_url=collection_url + ) base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': domain['id'], - 'group_id': group1['id']}) + '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' + % {'domain_id': domain['id'], 'group_id': group1['id']} + ) member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { 'collection_url': base_collection_url, - 'role_id': role_list[4]['id']} + 'role_id': role_list[4]['id'], + } collection_url = base_collection_url + '/inherited_to_projects' self.put(member_url) self.head(member_url) self.get(member_url, expected_status=http.client.NO_CONTENT) r = 
self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[4], - resource_url=collection_url) + self.assertValidRoleListResponse( + r, ref=role_list[4], resource_url=collection_url + ) # Now use the list role assignments api to get a list of inherited # roles on the domain - should get back the two roles collection_url = ( - '/role_assignments?scope.OS-INHERIT:inherited_to=projects') + '/role_assignments?scope.OS-INHERIT:inherited_to=projects' + ) r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, expected_length=2, resource_url=collection_url + ) ud_entity = self.build_role_assignment_entity( - domain_id=domain['id'], user_id=user1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) + domain_id=domain['id'], + user_id=user1['id'], + role_id=role_list[3]['id'], + inherited_to_projects=True, + ) gd_entity = self.build_role_assignment_entity( - domain_id=domain['id'], group_id=group1['id'], - role_id=role_list[4]['id'], inherited_to_projects=True) + domain_id=domain['id'], + group_id=group1['id'], + role_id=role_list[4]['id'], + inherited_to_projects=True, + ) self.assertRoleAssignmentInListResponse(r, ud_entity) self.assertRoleAssignmentInListResponse(r, gd_entity) @@ -2379,8 +2612,9 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, """ # Create project hierarchy root = unit.new_project_ref(domain_id=self.domain['id']) - leaf = unit.new_project_ref(domain_id=self.domain['id'], - parent_id=root['id']) + leaf = unit.new_project_ref( + domain_id=self.domain['id'], parent_id=root['id'] + ) PROVIDERS.resource_api.create_project(root['id'], root) PROVIDERS.resource_api.create_project(leaf['id'], leaf) @@ -2393,71 +2627,90 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, inherited_role = unit.new_role_ref(name='inherited') PROVIDERS.role_api.create_role(inherited_role['id'], 
inherited_role) - return (root['id'], leaf['id'], - non_inherited_role['id'], inherited_role['id']) + return ( + root['id'], + leaf['id'], + non_inherited_role['id'], + inherited_role['id'], + ) def test_get_token_from_inherited_user_project_role_grants(self): # Create default scenario root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) + self._setup_hierarchical_projects_scenario() + ) # Define root and leaf projects authentication data root_project_auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=root_id) + project_id=root_id, + ) leaf_project_auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=leaf_id) + project_id=leaf_id, + ) # Check the user cannot get a token on root nor leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http.client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + root_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) + self.v3_create_token( + leaf_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) # Grant non-inherited role for user on leaf project non_inher_up_link = self.build_role_assignment_link( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=leaf_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_up_link) # Check the user can only get a token on leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + root_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) self.v3_create_token(leaf_project_auth_data) # Grant inherited role for user on root project inher_up_link = self.build_role_assignment_link( - project_id=root_id, user_id=self.user['id'], - 
role_id=inherited_role_id, inherited_to_projects=True) + project_id=root_id, + user_id=self.user['id'], + role_id=inherited_role_id, + inherited_to_projects=True, + ) self.put(inher_up_link) # Check the user still can get a token only on leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + root_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) self.v3_create_token(leaf_project_auth_data) # Delete non-inherited grant self.delete(non_inher_up_link) # Check the inherited role still applies for leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + root_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) self.v3_create_token(leaf_project_auth_data) # Delete inherited grant self.delete(inher_up_link) # Check the user cannot get a token on leaf project anymore - self.v3_create_token(leaf_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + leaf_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_get_token_from_inherited_group_project_role_grants(self): # Create default scenario root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) + self._setup_hierarchical_projects_scenario() + ) # Create group and add user to it group = unit.new_group_ref(domain_id=self.domain['id']) @@ -2468,38 +2721,49 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, root_project_auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=root_id) + project_id=root_id, + ) leaf_project_auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=leaf_id) + project_id=leaf_id, + ) # Check the user cannot get a token on root nor leaf project - self.v3_create_token(root_project_auth_data, - 
expected_status=http.client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + root_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) + self.v3_create_token( + leaf_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) # Grant non-inherited role for group on leaf project non_inher_gp_link = self.build_role_assignment_link( - project_id=leaf_id, group_id=group['id'], - role_id=non_inherited_role_id) + project_id=leaf_id, + group_id=group['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_gp_link) # Check the user can only get a token on leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + root_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) self.v3_create_token(leaf_project_auth_data) # Grant inherited role for group on root project inher_gp_link = self.build_role_assignment_link( - project_id=root_id, group_id=group['id'], - role_id=inherited_role_id, inherited_to_projects=True) + project_id=root_id, + group_id=group['id'], + role_id=inherited_role_id, + inherited_to_projects=True, + ) self.put(inher_gp_link) # Check the user still can get a token only on leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + root_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) self.v3_create_token(leaf_project_auth_data) # Delete no-inherited grant @@ -2512,8 +2776,9 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, self.delete(inher_gp_link) # Check the user cannot get a token on leaf project anymore - self.v3_create_token(leaf_project_auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + leaf_project_auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_get_role_assignments_for_project_hierarchy(self): """Call ``GET 
/role_assignments``. @@ -2530,25 +2795,32 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, """ # Create default scenario root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) + self._setup_hierarchical_projects_scenario() + ) # Grant non-inherited role non_inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=root_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_up_entity['links']['assignment']) # Grant inherited role inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) + project_id=root_id, + user_id=self.user['id'], + role_id=inherited_role_id, + inherited_to_projects=True, + ) self.put(inher_up_entity['links']['assignment']) # Get role assignments collection_url = '/role_assignments' r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, resource_url=collection_url + ) # Assert that the user has non-inherited role on root project self.assertRoleAssignmentInListResponse(r, non_inher_up_entity) @@ -2558,8 +2830,10 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Assert that the user does not have non-inherited role on leaf project non_inher_up_entity = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=leaf_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) # Assert that the user does not have inherited role on leaf project @@ -2582,25 +2856,32 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, """ # Create default scenario root_id, leaf_id, non_inherited_role_id, 
inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) + self._setup_hierarchical_projects_scenario() + ) # Grant non-inherited role non_inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=root_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_up_entity['links']['assignment']) # Grant inherited role inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) + project_id=root_id, + user_id=self.user['id'], + role_id=inherited_role_id, + inherited_to_projects=True, + ) self.put(inher_up_entity['links']['assignment']) # Get effective role assignments collection_url = '/role_assignments?effective' r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, resource_url=collection_url + ) # Assert that the user has non-inherited role on root project self.assertRoleAssignmentInListResponse(r, non_inher_up_entity) @@ -2610,8 +2891,10 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Assert that the user does not have non-inherited role on leaf project non_inher_up_entity = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=leaf_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) # Assert that the user has inherited role on leaf project @@ -2620,14 +2903,19 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, def test_project_id_specified_if_include_subtree_specified(self): """When using include_subtree, you must specify a project ID.""" - r = self.get('/role_assignments?include_subtree=True', - expected_status=http.client.BAD_REQUEST) - error_msg = 
("scope.project.id must be specified if include_subtree " - "is also specified") + r = self.get( + '/role_assignments?include_subtree=True', + expected_status=http.client.BAD_REQUEST, + ) + error_msg = ( + "scope.project.id must be specified if include_subtree " + "is also specified" + ) self.assertEqual(error_msg, r.result['error']['message']) - r = self.get('/role_assignments?scope.project.id&' - 'include_subtree=True', - expected_status=http.client.BAD_REQUEST) + r = self.get( + '/role_assignments?scope.project.id&' 'include_subtree=True', + expected_status=http.client.BAD_REQUEST, + ) self.assertEqual(error_msg, r.result['error']['message']) def test_get_role_assignments_for_project_tree(self): @@ -2648,26 +2936,32 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, """ # Create default scenario root_id, leaf_id, non_inherited_role_id, unused_role_id = ( - self._setup_hierarchical_projects_scenario()) + self._setup_hierarchical_projects_scenario() + ) # Grant non-inherited role to root and leaf projects non_inher_entity_root = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=root_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_entity_root['links']['assignment']) non_inher_entity_leaf = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=leaf_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_entity_leaf['links']['assignment']) # Without the subtree, we should get the one assignment on the # root project - collection_url = ( - '/role_assignments?scope.project.id=%(project)s' % { - 'project': root_id}) + collection_url = '/role_assignments?scope.project.id=%(project)s' % { + 'project': root_id + } r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( - r, resource_url=collection_url) + r, 
resource_url=collection_url + ) self.assertThat(r.result['role_assignments'], matchers.HasLength(1)) self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) @@ -2675,11 +2969,12 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # With the subtree, we should get both assignments collection_url = ( '/role_assignments?scope.project.id=%(project)s' - '&include_subtree=True' % { - 'project': root_id}) + '&include_subtree=True' % {'project': root_id} + ) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( - r, resource_url=collection_url) + r, resource_url=collection_url + ) self.assertThat(r.result['role_assignments'], matchers.HasLength(2)) self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) @@ -2689,11 +2984,12 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # root project collection_url = ( '/role_assignments?scope.project.id=%(project)s' - '&include_subtree=0' % { - 'project': root_id}) + '&include_subtree=0' % {'project': root_id} + ) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( - r, resource_url=collection_url) + r, resource_url=collection_url + ) self.assertThat(r.result['role_assignments'], matchers.HasLength(1)) self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) @@ -2716,15 +3012,19 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, """ # Create default scenario root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) + self._setup_hierarchical_projects_scenario() + ) # Add some extra projects to the project hierarchy - level2 = unit.new_project_ref(domain_id=self.domain['id'], - parent_id=leaf_id) - level3 = unit.new_project_ref(domain_id=self.domain['id'], - parent_id=level2['id']) - level4 = unit.new_project_ref(domain_id=self.domain['id'], - parent_id=level3['id']) + level2 = unit.new_project_ref( + domain_id=self.domain['id'], parent_id=leaf_id + ) + level3 = 
unit.new_project_ref( + domain_id=self.domain['id'], parent_id=level2['id'] + ) + level4 = unit.new_project_ref( + domain_id=self.domain['id'], parent_id=level3['id'] + ) PROVIDERS.resource_api.create_project(level2['id'], level2) PROVIDERS.resource_api.create_project(level3['id'], level3) PROVIDERS.resource_api.create_project(level4['id'], level4) @@ -2732,28 +3032,36 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Grant non-inherited role to root (as a spoiler) and to # the level 1 (leaf) project non_inher_entity_root = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=root_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_entity_root['links']['assignment']) non_inher_entity_leaf = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=leaf_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_entity_leaf['links']['assignment']) # Grant inherited role to level 2 inher_entity = self.build_role_assignment_entity( - project_id=level2['id'], user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) + project_id=level2['id'], + user_id=self.user['id'], + role_id=inherited_role_id, + inherited_to_projects=True, + ) self.put(inher_entity['links']['assignment']) # Get effective role assignments collection_url = ( '/role_assignments?scope.project.id=%(project)s' - '&include_subtree=True&effective' % { - 'project': leaf_id}) + '&include_subtree=True&effective' % {'project': leaf_id} + ) r = self.get(collection_url) self.assertValidRoleAssignmentListResponse( - r, resource_url=collection_url) + r, resource_url=collection_url + ) # There should be three assignments returned in total self.assertThat(r.result['role_assignments'], matchers.HasLength(3)) @@ -2785,26 +3093,34 @@ class 
AssignmentInheritanceTestCase(test_v3.RestfulTestCase, """ # Create default scenario root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) + self._setup_hierarchical_projects_scenario() + ) # Grant non-inherited role non_inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=root_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.put(non_inher_up_entity['links']['assignment']) # Grant inherited role inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) + project_id=root_id, + user_id=self.user['id'], + role_id=inherited_role_id, + inherited_to_projects=True, + ) self.put(inher_up_entity['links']['assignment']) # Get inherited role assignments - collection_url = ('/role_assignments' - '?scope.OS-INHERIT:inherited_to=projects') + collection_url = ( + '/role_assignments' '?scope.OS-INHERIT:inherited_to=projects' + ) r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) + self.assertValidRoleAssignmentListResponse( + r, resource_url=collection_url + ) # Assert that the user does not have non-inherited role on root project self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) @@ -2814,8 +3130,10 @@ class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, # Assert that the user does not have non-inherited role on leaf project non_inher_up_entity = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) + project_id=leaf_id, + user_id=self.user['id'], + role_id=non_inherited_role_id, + ) self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) # Assert that the user does not have inherited role on leaf project @@ -2823,8 +3141,9 @@ class 
AssignmentInheritanceTestCase(test_v3.RestfulTestCase, self.assertRoleAssignmentNotInListResponse(r, inher_up_entity) -class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, - unit.TestCase): +class ImpliedRolesTests( + test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, unit.TestCase +): def _create_role(self): """Call ``POST /roles``.""" ref = unit.new_role_ref() @@ -2840,8 +3159,10 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, self.assertEqual(0, len(response['implies'])) def _create_implied_role(self, prior, implied): - self.put('/roles/%s/implies/%s' % (prior['id'], implied['id']), - expected_status=http.client.CREATED) + self.put( + '/roles/%s/implies/%s' % (prior['id'], implied['id']), + expected_status=http.client.CREATED, + ) def _delete_implied_role(self, prior, implied): self.delete('/roles/%s/implies/%s' % (prior['id'], implied['id'])) @@ -2854,17 +3175,20 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, self._create_implied_role(self.prior, self.implied2) def _assert_expected_implied_role_response( - self, expected_prior_id, expected_implied_ids): + self, expected_prior_id, expected_implied_ids + ): r = self.get('/roles/%s/implies' % expected_prior_id) response = r.json role_inference = response['role_inference'] self.assertEqual(expected_prior_id, role_inference['prior_role']['id']) prior_link = '/v3/roles/' + expected_prior_id + '/implies' - self.assertThat(response['links']['self'], - matchers.EndsWith(prior_link)) + self.assertThat( + response['links']['self'], matchers.EndsWith(prior_link) + ) - actual_implied_ids = [implied['id'] - for implied in role_inference['implies']] + actual_implied_ids = [ + implied['id'] for implied in role_inference['implies'] + ] self.assertCountEqual(expected_implied_ids, actual_implied_ids) @@ -2873,38 +3197,49 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, 
self.assertIsNotNone(implied['links']['self']) def _assert_expected_role_inference_rule_response( - self, expected_prior_id, expected_implied_id): + self, expected_prior_id, expected_implied_id + ): url = '/roles/%s/implies/%s' % (expected_prior_id, expected_implied_id) response = self.get(url).json - self.assertThat(response['links']['self'], - matchers.EndsWith('/v3%s' % url)) + self.assertThat( + response['links']['self'], matchers.EndsWith('/v3%s' % url) + ) role_inference = response['role_inference'] prior_role = role_inference['prior_role'] self.assertEqual(expected_prior_id, prior_role['id']) self.assertIsNotNone(prior_role['name']) - self.assertThat(prior_role['links']['self'], - matchers.EndsWith('/v3/roles/%s' % expected_prior_id)) + self.assertThat( + prior_role['links']['self'], + matchers.EndsWith('/v3/roles/%s' % expected_prior_id), + ) implied_role = role_inference['implies'] self.assertEqual(expected_implied_id, implied_role['id']) self.assertIsNotNone(implied_role['name']) - self.assertThat(implied_role['links']['self'], matchers.EndsWith( - '/v3/roles/%s' % expected_implied_id)) + self.assertThat( + implied_role['links']['self'], + matchers.EndsWith('/v3/roles/%s' % expected_implied_id), + ) def _assert_two_roles_implied(self): self._assert_expected_implied_role_response( - self.prior['id'], [self.implied1['id'], self.implied2['id']]) + self.prior['id'], [self.implied1['id'], self.implied2['id']] + ) self._assert_expected_role_inference_rule_response( - self.prior['id'], self.implied1['id']) + self.prior['id'], self.implied1['id'] + ) self._assert_expected_role_inference_rule_response( - self.prior['id'], self.implied2['id']) + self.prior['id'], self.implied2['id'] + ) def _assert_one_role_implied(self): self._assert_expected_implied_role_response( - self.prior['id'], [self.implied1['id']]) + self.prior['id'], [self.implied1['id']] + ) - self.get('/roles/%s/implies/%s' % - (self.prior['id'], self.implied2['id']), - 
expected_status=http.client.NOT_FOUND) + self.get( + '/roles/%s/implies/%s' % (self.prior['id'], self.implied2['id']), + expected_status=http.client.NOT_FOUND, + ) def _assert_two_rules_defined(self): r = self.get('/role_inferences/') @@ -2961,11 +3296,13 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, def _assign_top_role_to_user_on_project(self, user, project): PROVIDERS.assignment_api.add_role_to_user_and_project( - user['id'], project['id'], self.role_list[0]['id']) + user['id'], project['id'], self.role_list[0]['id'] + ) def _build_effective_role_assignments_url(self, user): return '/role_assignments?effective&user.id=%(user_id)s' % { - 'user_id': user['id']} + 'user_id': user['id'] + } def _assert_all_roles_in_assignment(self, response, user): # Now use the list role assignments api to check that all three roles @@ -2973,30 +3310,40 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, self.assertValidRoleAssignmentListResponse( response, expected_length=len(self.role_list), - resource_url=self._build_effective_role_assignments_url(user)) + resource_url=self._build_effective_role_assignments_url(user), + ) def _assert_initial_assignment_in_effective(self, response, user, project): # The initial assignment should be there (the link url will be # generated and checked automatically since it matches the assignment) entity = self.build_role_assignment_entity( project_id=project['id'], - user_id=user['id'], role_id=self.role_list[0]['id']) + user_id=user['id'], + role_id=self.role_list[0]['id'], + ) self.assertRoleAssignmentInListResponse(response, entity) def _assert_effective_role_for_implied_has_prior_in_links( - self, response, user, project, prior_index, implied_index): + self, response, user, project, prior_index, implied_index + ): # An effective role for an implied role will have the prior role # assignment in the links prior_link = '/prior_roles/%(prior)s/implies/%(implied)s' % { 'prior': 
self.role_list[prior_index]['id'], - 'implied': self.role_list[implied_index]['id']} + 'implied': self.role_list[implied_index]['id'], + } link = self.build_role_assignment_link( - project_id=project['id'], user_id=user['id'], - role_id=self.role_list[prior_index]['id']) + project_id=project['id'], + user_id=user['id'], + role_id=self.role_list[prior_index]['id'], + ) entity = self.build_role_assignment_entity( - link=link, project_id=project['id'], - user_id=user['id'], role_id=self.role_list[implied_index]['id'], - prior_link=prior_link) + link=link, + project_id=project['id'], + user_id=user['id'], + role_id=self.role_list[implied_index]['id'], + prior_link=prior_link, + ) self.assertRoleAssignmentInListResponse(response, entity) def test_list_role_assignments_with_implied_roles(self): @@ -3026,9 +3373,11 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, self._assert_all_roles_in_assignment(r, user) self._assert_initial_assignment_in_effective(response, user, project) self._assert_effective_role_for_implied_has_prior_in_links( - response, user, project, 0, 1) + response, user, project, 0, 1 + ) self._assert_effective_role_for_implied_has_prior_in_links( - response, user, project, 1, 2) + response, user, project, 1, 2 + ) def _create_named_role(self, name): role = unit.new_role_ref() @@ -3051,27 +3400,31 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, accepted_name1 = 'implied1' prohibited_names = [prohibited_name1, prohibited_name2] - self.config_fixture.config(group='assignment', - prohibited_implied_role=prohibited_names) + self.config_fixture.config( + group='assignment', prohibited_implied_role=prohibited_names + ) prior_role = self._create_role() prohibited_role1 = self._create_named_role(prohibited_name1) url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( prior_role_id=prior_role['id'], - implied_role_id=prohibited_role1['id']) + implied_role_id=prohibited_role1['id'], + ) 
self.put(url, expected_status=http.client.FORBIDDEN) prohibited_role2 = self._create_named_role(prohibited_name2) url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( prior_role_id=prior_role['id'], - implied_role_id=prohibited_role2['id']) + implied_role_id=prohibited_role2['id'], + ) self.put(url, expected_status=http.client.FORBIDDEN) accepted_role1 = self._create_named_role(accepted_name1) url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( prior_role_id=prior_role['id'], - implied_role_id=accepted_role1['id']) + implied_role_id=accepted_role1['id'], + ) self.put(url, expected_status=http.client.CREATED) def test_trusts_from_implied_role(self): @@ -3088,7 +3441,8 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, trustor_user_id=self.user['id'], trustee_user_id=trustee['id'], project_id=self.project['id'], - role_ids=[self.role_list[0]['id']]) + role_ids=[self.role_list[0]['id']], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = r.result['trust'] @@ -3100,11 +3454,13 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, auth_data = self.build_authentication_request( user_id=trustee['id'], password=trustee['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) r = self.v3_create_token(auth_data) token = r.result['token'] - self.assertThat(token['roles'], - matchers.HasLength(len(self.role_list))) + self.assertThat( + token['roles'], matchers.HasLength(len(self.role_list)) + ) for role in token['roles']: self.assertIn(role, self.role_list) for role in self.role_list: @@ -3127,7 +3483,8 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, trustor_user_id=self.user['id'], trustee_user_id=trustee['id'], project_id=self.project['id'], - role_ids=[self.role_list[0]['id']]) + role_ids=[self.role_list[0]['id']], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = r.result['trust'] @@ -3139,14 +3496,16 @@ class 
ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, auth_data = self.build_authentication_request( user_id=trustee['id'], password=trustee['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) r = self.v3_create_token(auth_data) token = r.result['token'] # The token should have the roles implies by the domain specific role, # but not the domain specific role itself. - self.assertThat(token['roles'], - matchers.HasLength(len(self.role_list) - 1)) + self.assertThat( + token['roles'], matchers.HasLength(len(self.role_list) - 1) + ) for role in token['roles']: self.assertIn(role, self.role_list) for role in [self.role_list[1], self.role_list[2]]: @@ -3166,9 +3525,10 @@ class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, global_role_ref['id'], global_role_ref ) - self.put('/roles/%s/implies/%s' % (global_role['id'], - domain_role['id']), - expected_status=http.client.FORBIDDEN) + self.put( + '/roles/%s/implies/%s' % (global_role['id'], domain_role['id']), + expected_status=http.client.FORBIDDEN, + ) class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase): @@ -3176,9 +3536,7 @@ class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase): def create_role(domain_id=None): """Call ``POST /roles``.""" ref = unit.new_role_ref(domain_id=domain_id) - r = self.post( - '/roles', - body={'role': ref}) + r = self.post('/roles', body={'role': ref}) return self.assertValidRoleResponse(r, ref) super(DomainSpecificRoleTests, self).setUp() @@ -3209,7 +3567,8 @@ class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase): # roles back. 
r = self.get('/roles') self.assertValidRoleListResponse( - r, expected_length=self.existing_global_roles) + r, expected_length=self.existing_global_roles + ) self.assertRoleInListResponse(r, self.global_role1) self.assertRoleInListResponse(r, self.global_role2) self.assertRoleNotInListResponse(r, self.domainA_role1) @@ -3224,39 +3583,46 @@ class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase): def test_update_domain_specific_roles(self): self.domainA_role1['name'] = uuid.uuid4().hex - self.patch('/roles/%(role_id)s' % { - 'role_id': self.domainA_role1['id']}, - body={'role': self.domainA_role1}) + self.patch( + '/roles/%(role_id)s' % {'role_id': self.domainA_role1['id']}, + body={'role': self.domainA_role1}, + ) r = self.get('/roles/%s' % self.domainA_role1['id']) self.assertValidRoleResponse(r, self.domainA_role1) def test_delete_domain_specific_roles(self): # Check delete only removes that one domain role - self.delete('/roles/%(role_id)s' % { - 'role_id': self.domainA_role1['id']}) + self.delete( + '/roles/%(role_id)s' % {'role_id': self.domainA_role1['id']} + ) - self.get('/roles/%s' % self.domainA_role1['id'], - expected_status=http.client.NOT_FOUND) + self.get( + '/roles/%s' % self.domainA_role1['id'], + expected_status=http.client.NOT_FOUND, + ) # Now re-list those in domainA, making sure there's only one left r = self.get('/roles?domain_id=%s' % self.domainA['id']) self.assertValidRoleListResponse(r, expected_length=1) self.assertRoleInListResponse(r, self.domainA_role2) def test_same_domain_assignment(self): - user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainA['id']) + user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainA['id'] + ) projectA = unit.new_project_ref(domain_id=self.domainA['id']) PROVIDERS.resource_api.create_project(projectA['id'], projectA) PROVIDERS.assignment_api.create_grant( - self.domainA_role1['id'], user_id=user['id'], - project_id=projectA['id'] + self.domainA_role1['id'], + 
user_id=user['id'], + project_id=projectA['id'], ) def test_cross_domain_assignment_valid(self): - user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainB['id']) + user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainB['id'] + ) projectA = unit.new_project_ref(domain_id=self.domainA['id']) PROVIDERS.resource_api.create_project(projectA['id'], projectA) @@ -3264,29 +3630,34 @@ class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase): # Positive: a role on domainA can be assigned to a user from domainB # but only for use on a project from domainA PROVIDERS.assignment_api.create_grant( - self.domainA_role1['id'], user_id=user['id'], - project_id=projectA['id'] + self.domainA_role1['id'], + user_id=user['id'], + project_id=projectA['id'], ) def test_cross_domain_assignment_invalid(self): - user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainB['id']) + user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainB['id'] + ) projectB = unit.new_project_ref(domain_id=self.domainB['id']) PROVIDERS.resource_api.create_project(projectB['id'], projectB) # Negative: a role on domainA can be assigned to a user from domainB # only for a project from domainA - self.assertRaises(exception.DomainSpecificRoleMismatch, - PROVIDERS.assignment_api.create_grant, - self.domainA_role1['id'], - user_id=user['id'], - project_id=projectB['id']) + self.assertRaises( + exception.DomainSpecificRoleMismatch, + PROVIDERS.assignment_api.create_grant, + self.domainA_role1['id'], + user_id=user['id'], + project_id=projectB['id'], + ) def test_cross_domain_implied_roles_authentication(self): # Create a user in domainB - user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainB['id']) + user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainB['id'] + ) # Create project in domainA projectA = unit.new_project_ref(domain_id=self.domainA['id']) @@ -3294,30 +3665,38 @@ class 
DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase): # Now we create an implied rule from a role in domainA to a # role in domainB - self.put('/roles/%s/implies/%s' % - (self.domainA_role1['id'], self.domainB_role['id']), - expected_status=http.client.CREATED) + self.put( + '/roles/%s/implies/%s' + % (self.domainA_role1['id'], self.domainB_role['id']), + expected_status=http.client.CREATED, + ) # A role in domainA can be assigned to a user from domainB # only for a project from domainA PROVIDERS.assignment_api.create_grant( - self.domainA_role1['id'], user_id=user['id'], - project_id=projectA['id'] + self.domainA_role1['id'], + user_id=user['id'], + project_id=projectA['id'], ) # The role assignments should return an empty list since domain roles # can only be used to imply another roles assignments = PROVIDERS.assignment_api.list_role_assignments( - user_id=user['id'], effective=True) + user_id=user['id'], effective=True + ) self.assertEqual([], assignments) # This also means we can't authenticate using the existing assignment auth_body = self.build_authentication_request( user_id=user['id'], password=user['password'], - project_id=projectA['id']) - self.post('/auth/tokens', body=auth_body, - expected_status=http.client.UNAUTHORIZED) + project_id=projectA['id'], + ) + self.post( + '/auth/tokens', + body=auth_body, + expected_status=http.client.UNAUTHORIZED, + ) class ListUserProjectsTestCase(test_v3.RestfulTestCase): @@ -3333,11 +3712,11 @@ class ListUserProjectsTestCase(test_v3.RestfulTestCase): self.users = [] root_domain = unit.new_domain_ref( - id=resource_base.NULL_DOMAIN_ID, - name=resource_base.NULL_DOMAIN_ID + id=resource_base.NULL_DOMAIN_ID, name=resource_base.NULL_DOMAIN_ID + ) + self.resource_api.create_domain( + resource_base.NULL_DOMAIN_ID, root_domain ) - self.resource_api.create_domain(resource_base.NULL_DOMAIN_ID, - root_domain) # Create 3 sets of domain, roles, projects, and users to demonstrate # the right user's data is loaded and 
only projects they can access @@ -3368,7 +3747,8 @@ class ListUserProjectsTestCase(test_v3.RestfulTestCase): auth = self.build_authentication_request( user_id=user['id'], password=user['password'], - domain_id=domain['id']) + domain_id=domain['id'], + ) self.auths.append(auth) self.domains.append(domain) @@ -3426,14 +3806,18 @@ class ListUserProjectsTestCase(test_v3.RestfulTestCase): auth = self.auths[i] # Try looking for projects with a non-existent domain_id - url = '/users/%s/projects?domain_id=%s' % (user['id'], - uuid.uuid4().hex) + url = '/users/%s/projects?domain_id=%s' % ( + user['id'], + uuid.uuid4().hex, + ) result = self.get(url, auth=auth) self.assertEqual(0, len(result.json['projects'])) # Now try a valid one - url = '/users/%s/projects?domain_id=%s' % (user['id'], - domain['id']) + url = '/users/%s/projects?domain_id=%s' % ( + user['id'], + domain['id'], + ) result = self.get(url, auth=auth) projects_result = result.json['projects'] self.assertEqual(1, len(projects_result)) @@ -3445,37 +3829,35 @@ class ListUserProjectsTestCase(test_v3.RestfulTestCase): # token, regardless of the role assignment on the project. We need to fix # them by using a proper system-scoped admin token to make the call instead # of a project scoped token. 
-class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, - SystemRoleAssignmentMixin): +class UserSystemRoleAssignmentTestCase( + test_v3.RestfulTestCase, SystemRoleAssignmentMixin +): def test_assign_system_role_to_user(self): system_role_id = self._create_new_role() # assign the user a role on the system - member_url = ( - '/system/users/%(user_id)s/roles/%(role_id)s' % { - 'user_id': self.user['id'], - 'role_id': system_role_id - } - ) + member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { + 'user_id': self.user['id'], + 'role_id': system_role_id, + } self.put(member_url) # validate the role assignment self.head(member_url) # list system roles - collection_url = ( - '/system/users/%(user_id)s/roles' % {'user_id': self.user['id']} - ) + collection_url = '/system/users/%(user_id)s/roles' % { + 'user_id': self.user['id'] + } roles = self.get(collection_url).json_body['roles'] self.assertEqual(len(roles), 1) self.assertEqual(roles[0]['id'], system_role_id) self.head(collection_url, expected_status=http.client.OK) response = self.get( - '/role_assignments?scope.system=all&user.id=%(user_id)s' % { - 'user_id': self.user['id'] - } + '/role_assignments?scope.system=all&user.id=%(user_id)s' + % {'user_id': self.user['id']} ) self.assertValidRoleAssignmentListResponse(response) @@ -3485,16 +3867,15 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) # the response should contain one role assignment for the system role # and one for a role that was setup during setUp(). 
response = self.get( - '/role_assignments?user.id=%(user_id)s' % { - 'user_id': self.user['id'] - } + '/role_assignments?user.id=%(user_id)s' + % {'user_id': self.user['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=2) @@ -3510,9 +3891,8 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, self.assertEqual(response.json_body['roles'], []) response = self.get( - '/role_assignments?scope.system=all&user.id=%(user_id)s' % { - 'user_id': self.user['id'] - } + '/role_assignments?scope.system=all&user.id=%(user_id)s' + % {'user_id': self.user['id']} ) self.assertEqual(len(response.json_body['role_assignments']), 0) self.assertValidRoleAssignmentListResponse(response) @@ -3523,17 +3903,15 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) # list project role assignments and save the role id of that # assignment, this assignment was created during setUp response = self.get( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.project['id'], - 'user_id': self.user['id'] - } + '/projects/%(project_id)s/users/%(user_id)s/roles' + % {'project_id': self.project['id'], 'user_id': self.user['id']} ) self.assertEqual(len(response.json_body['roles']), 1) project_role_id = response.json_body['roles'][0]['id'] @@ -3552,9 +3930,8 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # make sure the role_assignment API filters correctly based on system # scope response = self.get( - '/role_assignments?scope.system=all&user.id=%(user_id)s' % { - 'user_id': self.user['id'] - } + '/role_assignments?scope.system=all&user.id=%(user_id)s' + % {'user_id': self.user['id']} ) self.assertEqual(len(response.json_body['role_assignments']), 1) system_assignment = 
response.json_body['role_assignments'][0] @@ -3566,8 +3943,7 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, path = ( '/role_assignments?scope.project.id=%(project_id)s&' 'user.id=%(user_id)s' - ) % {'project_id': self.project['id'], - 'user_id': self.user['id']} + ) % {'project_id': self.project['id'], 'user_id': self.user['id']} response = self.get(path) self.assertEqual(len(response.json_body['role_assignments']), 1) project_assignment = response.json_body['role_assignments'][0] @@ -3579,10 +3955,11 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign a role to the user on a domain domain_member_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles/%(role_id)s' % { + '/domains/%(domain_id)s/users/%(user_id)s/roles/%(role_id)s' + % { 'domain_id': self.user['domain_id'], 'user_id': self.user['id'], - 'role_id': domain_role_id + 'role_id': domain_role_id, } ) self.put(domain_member_url) @@ -3590,16 +3967,14 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) # list domain role assignments response = self.get( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.user['domain_id'], - 'user_id': self.user['id'] - } + '/domains/%(domain_id)s/users/%(user_id)s/roles' + % {'domain_id': self.user['domain_id'], 'user_id': self.user['id']} ) self.assertEqual(len(response.json_body['roles']), 1) @@ -3617,9 +3992,8 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # make sure the role_assignment API filters correctly based on system # scope response = self.get( - '/role_assignments?scope.system=all&user.id=%(user_id)s' % { - 'user_id': self.user['id'] - } + '/role_assignments?scope.system=all&user.id=%(user_id)s' + % {'user_id': self.user['id']} ) 
self.assertEqual(len(response.json_body['role_assignments']), 1) system_assignment = response.json_body['role_assignments'][0] @@ -3631,8 +4005,7 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, path = ( '/role_assignments?scope.domain.id=%(domain_id)s&' 'user.id=%(user_id)s' - ) % {'domain_id': self.user['domain_id'], - 'user_id': self.user['id']} + ) % {'domain_id': self.user['domain_id'], 'user_id': self.user['id']} response = self.get(path) self.assertEqual(len(response.json_body['role_assignments']), 1) domain_assignment = response.json_body['role_assignments'][0] @@ -3644,7 +4017,7 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) @@ -3657,14 +4030,13 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # check the user does't have the system role assignment member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.head(member_url, expected_status=http.client.NOT_FOUND) response = self.get( - '/role_assignments?scope.system=all&user.id=%(user_id)s' % { - 'user_id': self.user['id'] - } + '/role_assignments?scope.system=all&user.id=%(user_id)s' + % {'user_id': self.user['id']} ) self.assertEqual(len(response.json_body['role_assignments']), 0) self.assertValidRoleAssignmentListResponse(response) @@ -3675,7 +4047,7 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) @@ -3683,9 +4055,8 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, self.head(member_url) response = self.get( 
- '/role_assignments?scope.system=all&user.id=%(user_id)s' % { - 'user_id': self.user['id'] - } + '/role_assignments?scope.system=all&user.id=%(user_id)s' + % {'user_id': self.user['id']} ) self.assertEqual(len(response.json_body['role_assignments']), 1) self.assertValidRoleAssignmentListResponse(response) @@ -3700,9 +4071,8 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, response = self.get(collection_url) self.assertEqual(len(response.json_body['roles']), 0) response = self.get( - '/role_assignments?scope.system=all&user.id=%(user_id)s' % { - 'user_id': self.user['id'] - } + '/role_assignments?scope.system=all&user.id=%(user_id)s' + % {'user_id': self.user['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=0) @@ -3732,7 +4102,7 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the user a role on the system member_url = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) @@ -3750,8 +4120,9 @@ class UserSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # token, regardless of the role assignment on the project. We need to fix # them by using a proper system-scoped admin token to make the call instead # of a project scoped token. 
-class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, - SystemRoleAssignmentMixin): +class GroupSystemRoleAssignmentTestCase( + test_v3.RestfulTestCase, SystemRoleAssignmentMixin +): def test_assign_system_role_to_group(self): system_role_id = self._create_new_role() @@ -3760,7 +4131,7 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the role to the group globally member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) @@ -3777,14 +4148,13 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, self.head(collection_url, expected_status=http.client.OK) response = self.get( - '/role_assignments?scope.system=all&group.id=%(group_id)s' % { - 'group_id': group['id'] - } + '/role_assignments?scope.system=all&group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=1) self.assertEqual( response.json_body['role_assignments'][0]['role']['id'], - system_role_id + system_role_id, ) def test_assign_system_role_to_non_existant_group_fails(self): @@ -3794,7 +4164,7 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the role to the group globally member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group_id, - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url, expected_status=http.client.NOT_FOUND) @@ -3805,24 +4175,22 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the role to the group globally and on a single project member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) member_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/' - 'roles/%(role_id)s' + '/projects/%(project_id)s/groups/%(group_id)s/' 'roles/%(role_id)s' ) % { 
'project_id': self.project_id, 'group_id': group['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) # make sure both assignments exist in the response, there should be two response = self.get( - '/role_assignments?group.id=%(group_id)s' % { - 'group_id': group['id'] - } + '/role_assignments?group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=2) @@ -3840,9 +4208,8 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, self.assertEqual(response.json_body['roles'], []) response = self.get( - '/role_assignments?scope.system=all&group.id=%(group_id)s' % { - 'group_id': group['id'] - } + '/role_assignments?scope.system=all&group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=0) @@ -3853,16 +4220,16 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the group a role on the system and a role on a project member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { - 'group_id': group['id'], 'role_id': system_role_id + 'group_id': group['id'], + 'role_id': system_role_id, } self.put(member_url) member_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/' - 'roles/%(role_id)s' + '/projects/%(project_id)s/groups/%(group_id)s/' 'roles/%(role_id)s' ) % { 'project_id': self.project_id, 'group_id': group['id'], - 'role_id': project_role_id + 'role_id': project_role_id, } self.put(member_url) @@ -3878,9 +4245,8 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, self.assertNotEqual(role['id'], project_role_id) response = self.get( - '/role_assignments?scope.system=all&group.id=%(group_id)s' % { - 'group_id': group['id'] - } + '/role_assignments?scope.system=all&group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=1) @@ -3892,10 +4258,11 @@ class 
GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign a role to the group on a domain domain_member_url = ( '/domains/%(domain_id)s/groups/%(group_id)s/' - 'roles/%(role_id)s' % { + 'roles/%(role_id)s' + % { 'domain_id': group['domain_id'], 'group_id': group['id'], - 'role_id': domain_role_id + 'role_id': domain_role_id, } ) self.put(domain_member_url) @@ -3903,15 +4270,14 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the group a role on the system member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) # list domain role assignments response = self.get( - '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': group['domain_id'], 'group_id': group['id'] - } + '/domains/%(domain_id)s/groups/%(group_id)s/roles' + % {'domain_id': group['domain_id'], 'group_id': group['id']} ) self.assertEqual(len(response.json_body['roles']), 1) @@ -3927,9 +4293,8 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, self.assertNotEqual(role['id'], domain_role_id) response = self.get( - '/role_assignments?scope.system=all&group.id=%(group_id)s' % { - 'group_id': group['id'] - } + '/role_assignments?scope.system=all&group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=1) @@ -3940,7 +4305,7 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the group a role on the system member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) @@ -3948,14 +4313,13 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, self.head(member_url) response = self.get( - '/role_assignments?scope.system=all&group.id=%(group_id)s' % { - 'group_id': group['id'] - } + 
'/role_assignments?scope.system=all&group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=1) self.assertEqual( response.json_body['role_assignments'][0]['role']['id'], - system_role_id + system_role_id, ) def test_check_group_does_not_have_system_role_without_assignment(self): @@ -3965,14 +4329,13 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # check the group does't have the system role assignment member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.head(member_url, expected_status=http.client.NOT_FOUND) response = self.get( - '/role_assignments?scope.system=all&group.id=%(group_id)s' % { - 'group_id': group['id'] - } + '/role_assignments?scope.system=all&group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=0) @@ -3983,7 +4346,7 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the group a role on the system member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) @@ -3991,9 +4354,8 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, self.head(member_url) response = self.get( - '/role_assignments?scope.system=all&group.id=%(group_id)s' % { - 'group_id': group['id'] - } + '/role_assignments?scope.system=all&group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertEqual(len(response.json_body['role_assignments']), 1) self.assertValidRoleAssignmentListResponse(response) @@ -4008,9 +4370,8 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, response = self.get(collection_url) self.assertEqual(len(response.json_body['roles']), 0) response = self.get( - '/role_assignments?scope.system=all&group.id=%(group_id)s' % { - 'group_id': 
group['id'] - } + '/role_assignments?scope.system=all&group.id=%(group_id)s' + % {'group_id': group['id']} ) self.assertValidRoleAssignmentListResponse(response, expected_length=0) @@ -4021,16 +4382,18 @@ class GroupSystemRoleAssignmentTestCase(test_v3.RestfulTestCase, # assign the group a role on the system member_url = '/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], - 'role_id': system_role_id + 'role_id': system_role_id, } self.put(member_url) # assign the group a role on the system member_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' % - {'project_id': self.project_id, - 'group_id': group['id'], - 'role_id': self.role_id} + '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' + % { + 'project_id': self.project_id, + 'group_id': group['id'], + 'role_id': self.role_id, + } ) self.put(member_url) diff --git a/keystone/tests/unit/test_v3_auth.py b/keystone/tests/unit/test_v3_auth.py index eb7ea0e292..dc8beb4074 100644 --- a/keystone/tests/unit/test_v3_auth.py +++ b/keystone/tests/unit/test_v3_auth.py @@ -59,7 +59,7 @@ class TestMFARules(test_v3.RestfulTestCase): ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) @@ -67,7 +67,7 @@ class TestMFARules(test_v3.RestfulTestCase): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_fernet.MAX_ACTIVE_KEYS + credential_fernet.MAX_ACTIVE_KEYS, ) ) @@ -89,7 +89,8 @@ class TestMFARules(test_v3.RestfulTestCase): def cleanup(testcase): totp_creds = testcase.credential_api.list_credentials_for_user( - testcase.user['id'], type='totp') + testcase.user['id'], type='totp' + ) for cred in totp_creds: testcase.credential_api.delete_credential(cred['id']) @@ -124,7 +125,9 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id)) + 
project_id=self.project_id, + ) + ) def test_MFA_multi_method_rules_requirements_met_succeeds(self): # validate that multiple auth-methods function if all are specified @@ -142,7 +145,8 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - passcode=totp._generate_totp_passcodes(totp_cred['blob'])[0]) + passcode=totp._generate_totp_passcodes(totp_cred['blob'])[0], + ) self.v3_create_token(auth_req) def test_MFA_single_method_rules_requirements_not_met_fails(self): @@ -161,8 +165,10 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id), - expected_status=http.client.UNAUTHORIZED) + project_id=self.project_id, + ), + expected_status=http.client.UNAUTHORIZED, + ) def test_MFA_multi_method_rules_requirements_not_met_fails(self): # if multiple rules are specified and only one is passed, @@ -180,8 +186,10 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id), - expected_status=http.client.UNAUTHORIZED) + project_id=self.project_id, + ), + expected_status=http.client.UNAUTHORIZED, + ) def test_MFA_rules_bogus_non_existing_auth_method_succeeds(self): # Bogus auth methods are thrown out from rules. 
@@ -198,14 +206,17 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id)) + project_id=self.project_id, + ) + ) def test_MFA_rules_disabled_MFA_succeeeds(self): # ensure that if MFA is "disableD" authentication succeeds, even if # not enough auth methods are specified rule_list = [['password', 'totp']] - self._update_user_with_MFA_rules(rule_list=rule_list, - rules_enabled=False) + self._update_user_with_MFA_rules( + rule_list=rule_list, rules_enabled=False + ) time = datetime.datetime.utcnow() + datetime.timedelta(seconds=5) # NOTE(notmorgan): Step forward in time to ensure we're not causing # issues with revocation events that occur at the same time as the @@ -217,14 +228,18 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id)) + project_id=self.project_id, + ) + ) def test_MFA_rules_all_bogus_rules_results_in_default_behavior(self): # if all the rules are bogus, the result is the same as the default # behavior, any single password method is sufficient - rule_list = [[uuid.uuid4().hex, uuid.uuid4().hex], - ['BoGus'], - ['NonExistantMethod']] + rule_list = [ + [uuid.uuid4().hex, uuid.uuid4().hex], + ['BoGus'], + ['NonExistantMethod'], + ] self._update_user_with_MFA_rules(rule_list=rule_list) # NOTE(notmorgan): Step forward in time to ensure we're not causing # issues with revocation events that occur at the same time as the @@ -237,7 +252,9 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id)) + project_id=self.project_id, + ) + ) def test_MFA_rules_rescope_works_without_token_method_in_rules(self): rule_list = [['password', 'totp']] @@ -253,11 +270,13 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, 
password=self.user['password'], user_domain_id=self.domain_id, - passcode=totp._generate_totp_passcodes(totp_cred['blob'])[0]) + passcode=totp._generate_totp_passcodes(totp_cred['blob'])[0], + ) r = self.v3_create_token(auth_data) auth_data = self.build_authentication_request( token=r.headers.get('X-Subject-Token'), - project_id=self.project_id) + project_id=self.project_id, + ) self.v3_create_token(auth_data) def test_MFA_requirements_makes_correct_receipt_for_password(self): @@ -278,19 +297,24 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id), - expected_status=http.client.UNAUTHORIZED) + project_id=self.project_id, + ), + expected_status=http.client.UNAUTHORIZED, + ) self.assertIsNotNone( - response.headers.get(authorization.AUTH_RECEIPT_HEADER)) + response.headers.get(authorization.AUTH_RECEIPT_HEADER) + ) resp_data = response.result # NOTE(adriant): We convert to sets to avoid any potential sorting # related failures since order isn't important, just content. 
self.assertEqual( - {'password'}, set(resp_data.get('receipt').get('methods'))) + {'password'}, set(resp_data.get('receipt').get('methods')) + ) self.assertEqual( set(frozenset(r) for r in rule_list), - set(frozenset(r) for r in resp_data.get('required_auth_methods'))) + set(frozenset(r) for r in resp_data.get('required_auth_methods')), + ) def test_MFA_requirements_makes_correct_receipt_for_totp(self): # if multiple rules are specified and only one is passed, @@ -311,20 +335,26 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, user_domain_id=self.domain_id, project_id=self.project_id, - passcode=totp._generate_totp_passcodes( - totp_cred['blob'])[0]), - expected_status=http.client.UNAUTHORIZED) + passcode=totp._generate_totp_passcodes(totp_cred['blob'])[ + 0 + ], + ), + expected_status=http.client.UNAUTHORIZED, + ) self.assertIsNotNone( - response.headers.get(authorization.AUTH_RECEIPT_HEADER)) + response.headers.get(authorization.AUTH_RECEIPT_HEADER) + ) resp_data = response.result # NOTE(adriant): We convert to sets to avoid any potential sorting # related failures since order isn't important, just content. 
self.assertEqual( - {'totp'}, set(resp_data.get('receipt').get('methods'))) + {'totp'}, set(resp_data.get('receipt').get('methods')) + ) self.assertEqual( set(frozenset(r) for r in rule_list), - set(frozenset(r) for r in resp_data.get('required_auth_methods'))) + set(frozenset(r) for r in resp_data.get('required_auth_methods')), + ) def test_MFA_requirements_makes_correct_receipt_for_pass_and_totp(self): # if multiple rules are specified and only one is passed, @@ -346,20 +376,26 @@ class TestMFARules(test_v3.RestfulTestCase): password=self.user['password'], user_domain_id=self.domain_id, project_id=self.project_id, - passcode=totp._generate_totp_passcodes( - totp_cred['blob'])[0]), - expected_status=http.client.UNAUTHORIZED) + passcode=totp._generate_totp_passcodes(totp_cred['blob'])[ + 0 + ], + ), + expected_status=http.client.UNAUTHORIZED, + ) self.assertIsNotNone( - response.headers.get(authorization.AUTH_RECEIPT_HEADER)) + response.headers.get(authorization.AUTH_RECEIPT_HEADER) + ) resp_data = response.result # NOTE(adriant): We convert to sets to avoid any potential sorting # related failures since order isn't important, just content. 
self.assertEqual( - {'password', 'totp'}, set(resp_data.get('receipt').get('methods'))) + {'password', 'totp'}, set(resp_data.get('receipt').get('methods')) + ) self.assertEqual( set(frozenset(r) for r in rule_list), - set(frozenset(r) for r in resp_data.get('required_auth_methods'))) + set(frozenset(r) for r in resp_data.get('required_auth_methods')), + ) def test_MFA_requirements_returns_correct_required_auth_methods(self): # if multiple rules are specified and only one is passed, @@ -368,7 +404,7 @@ class TestMFARules(test_v3.RestfulTestCase): ['password', 'totp', 'token'], ['password', 'totp'], ['token', 'totp'], - ['BoGusAuThMeTh0dHandl3r'] + ['BoGusAuThMeTh0dHandl3r'], ] expect_rule_list = rule_list = [ ['password', 'totp', 'token'], @@ -388,19 +424,24 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id), - expected_status=http.client.UNAUTHORIZED) + project_id=self.project_id, + ), + expected_status=http.client.UNAUTHORIZED, + ) self.assertIsNotNone( - response.headers.get(authorization.AUTH_RECEIPT_HEADER)) + response.headers.get(authorization.AUTH_RECEIPT_HEADER) + ) resp_data = response.result # NOTE(adriant): We convert to sets to avoid any potential sorting # related failures since order isn't important, just content. 
self.assertEqual( - {'password'}, set(resp_data.get('receipt').get('methods'))) + {'password'}, set(resp_data.get('receipt').get('methods')) + ) self.assertEqual( set(frozenset(r) for r in expect_rule_list), - set(frozenset(r) for r in resp_data.get('required_auth_methods'))) + set(frozenset(r) for r in resp_data.get('required_auth_methods')), + ) def test_MFA_consuming_receipt_with_totp(self): # if multiple rules are specified and only one is passed, @@ -421,20 +462,25 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, password=self.user['password'], user_domain_id=self.domain_id, - project_id=self.project_id), - expected_status=http.client.UNAUTHORIZED) + project_id=self.project_id, + ), + expected_status=http.client.UNAUTHORIZED, + ) self.assertIsNotNone( - response.headers.get(authorization.AUTH_RECEIPT_HEADER)) + response.headers.get(authorization.AUTH_RECEIPT_HEADER) + ) receipt = response.headers.get(authorization.AUTH_RECEIPT_HEADER) resp_data = response.result # NOTE(adriant): We convert to sets to avoid any potential sorting # related failures since order isn't important, just content. 
self.assertEqual( - {'password'}, set(resp_data.get('receipt').get('methods'))) + {'password'}, set(resp_data.get('receipt').get('methods')) + ) self.assertEqual( set(frozenset(r) for r in rule_list), - set(frozenset(r) for r in resp_data.get('required_auth_methods'))) + set(frozenset(r) for r in resp_data.get('required_auth_methods')), + ) time = datetime.datetime.utcnow() + datetime.timedelta(seconds=5) with freezegun.freeze_time(time): @@ -446,8 +492,11 @@ class TestMFARules(test_v3.RestfulTestCase): user_id=self.user_id, user_domain_id=self.domain_id, project_id=self.project_id, - passcode=totp._generate_totp_passcodes( - totp_cred['blob'])[0])) + passcode=totp._generate_totp_passcodes(totp_cred['blob'])[ + 0 + ], + ), + ) def test_MFA_consuming_receipt_not_found(self): time = datetime.datetime.utcnow() + datetime.timedelta(seconds=5) @@ -459,8 +508,10 @@ class TestMFARules(test_v3.RestfulTestCase): body=self.build_authentication_request( user_id=self.user_id, user_domain_id=self.domain_id, - project_id=self.project_id), - expected_status=http.client.UNAUTHORIZED) + project_id=self.project_id, + ), + expected_status=http.client.UNAUTHORIZED, + ) self.assertEqual(401, response.result['error']['code']) @@ -473,57 +524,61 @@ class TestAuthInfo(common_auth.AuthTestMixin, testcase.TestCase): auth_data = {'methods': ['abc']} auth_data['abc'] = {'test': 'test'} auth_data = {'identity': auth_data} - self.assertRaises(exception.AuthMethodNotSupported, - auth.core.AuthInfo.create, - auth_data) + self.assertRaises( + exception.AuthMethodNotSupported, + auth.core.AuthInfo.create, + auth_data, + ) def test_missing_auth_method_data(self): auth_data = {'methods': ['password']} auth_data = {'identity': auth_data} - self.assertRaises(exception.ValidationError, - auth.core.AuthInfo.create, - auth_data) + self.assertRaises( + exception.ValidationError, auth.core.AuthInfo.create, auth_data + ) def test_project_name_no_domain(self): auth_data = self.build_authentication_request( - 
username='test', - password='test', - project_name='abc')['auth'] - self.assertRaises(exception.ValidationError, - auth.core.AuthInfo.create, - auth_data) + username='test', password='test', project_name='abc' + )['auth'] + self.assertRaises( + exception.ValidationError, auth.core.AuthInfo.create, auth_data + ) def test_both_project_and_domain_in_scope(self): auth_data = self.build_authentication_request( user_id='test', password='test', project_name='test', - domain_name='test')['auth'] - self.assertRaises(exception.ValidationError, - auth.core.AuthInfo.create, - auth_data) + domain_name='test', + )['auth'] + self.assertRaises( + exception.ValidationError, auth.core.AuthInfo.create, auth_data + ) def test_get_method_names_duplicates(self): auth_data = self.build_authentication_request( - token='test', - user_id='test', - password='test')['auth'] - auth_data['identity']['methods'] = ['password', 'token', - 'password', 'password'] + token='test', user_id='test', password='test' + )['auth'] + auth_data['identity']['methods'] = [ + 'password', + 'token', + 'password', + 'password', + ] auth_info = auth.core.AuthInfo.create(auth_data) - self.assertEqual(['password', 'token'], - auth_info.get_method_names()) + self.assertEqual(['password', 'token'], auth_info.get_method_names()) def test_get_method_data_invalid_method(self): auth_data = self.build_authentication_request( - user_id='test', - password='test')['auth'] + user_id='test', password='test' + )['auth'] auth_info = auth.core.AuthInfo.create(auth_data) method_name = uuid.uuid4().hex - self.assertRaises(exception.ValidationError, - auth_info.get_method_data, - method_name) + self.assertRaises( + exception.ValidationError, auth_info.get_method_data, method_name + ) class TokenAPITests(object): @@ -533,18 +588,21 @@ class TokenAPITests(object): # resolved in Python for multiple inheritance means that a setUp in this # would get skipped by the testrunner. 
def doSetUp(self): - r = self.v3_create_token(self.build_authentication_request( - username=self.user['name'], - user_domain_id=self.domain_id, - password=self.user['password'])) + r = self.v3_create_token( + self.build_authentication_request( + username=self.user['name'], + user_domain_id=self.domain_id, + password=self.user['password'], + ) + ) self.v3_token_data = r.result self.v3_token = r.headers.get('X-Subject-Token') self.headers = {'X-Subject-Token': r.headers.get('X-Subject-Token')} def _get_unscoped_token(self): auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.post('/auth/tokens', body=auth_data) self.assertValidUnscopedTokenResponse(r) return r.headers.get('X-Subject-Token') @@ -553,7 +611,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain_id) + domain_id=self.domain_id, + ) r = self.post('/auth/tokens', body=auth_data) self.assertValidDomainScopedTokenResponse(r) return r.headers.get('X-Subject-Token') @@ -562,7 +621,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project_id) + project_id=self.project_id, + ) r = self.post('/auth/tokens', body=auth_data) self.assertValidProjectScopedTokenResponse(r) return r.headers.get('X-Subject-Token') @@ -571,29 +631,33 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=trustee_user['id'], password=trustee_user['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) r = self.post('/auth/tokens', body=auth_data) self.assertValidProjectScopedTokenResponse(r) return r.headers.get('X-Subject-Token') def _create_trust(self, impersonation=False): # Create a trustee user - trustee_user = unit.create_user(PROVIDERS.identity_api, - 
domain_id=self.domain_id) + trustee_user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain_id + ) ref = unit.new_trust_ref( trustor_user_id=self.user_id, trustee_user_id=trustee_user['id'], project_id=self.project_id, impersonation=impersonation, - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) # Create a trust r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r) return (trustee_user, trust) - def _validate_token(self, token, - expected_status=http.client.OK, allow_expired=False): + def _validate_token( + self, token, expected_status=http.client.OK, allow_expired=False + ): path = '/v3/auth/tokens' if allow_expired: @@ -601,17 +665,20 @@ class TokenAPITests(object): return self.admin_request( path=path, - headers={'X-Auth-Token': self.get_admin_token(), - 'X-Subject-Token': token}, + headers={ + 'X-Auth-Token': self.get_admin_token(), + 'X-Subject-Token': token, + }, method='GET', - expected_status=expected_status + expected_status=expected_status, ) def _revoke_token(self, token, expected_status=http.client.NO_CONTENT): return self.delete( '/auth/tokens', headers={'x-subject-token': token}, - expected_status=expected_status) + expected_status=expected_status, + ) def _set_user_enabled(self, user, enabled=True): user['enabled'] = enabled @@ -625,16 +692,19 @@ class TokenAPITests(object): # grant the user a role on the project self.put( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { + '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' + % { 'user_id': self.user['id'], 'project_id': project['id'], - 'role_id': self.role['id']}) + 'role_id': self.role['id'], + } + ) # make the new project the user's default project body = {'user': {'default_project_id': project['id']}} - r = self.patch('/users/%(user_id)s' % { - 'user_id': self.user['id']}, - body=body) + r = self.patch( + '/users/%(user_id)s' % {'user_id': self.user['id']}, body=body + ) 
self.assertValidUserResponse(r) return project @@ -648,9 +718,11 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( token=token, user_id=self.default_domain_user['id'], - password=self.default_domain_user['password']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + password=self.default_domain_user['password'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_token_for_user_without_password_fails(self): user = unit.new_user_ref(domain_id=self.domain['id']) @@ -658,16 +730,17 @@ class TokenAPITests(object): user = PROVIDERS.identity_api.create_user(user) auth_data = self.build_authentication_request( - user_id=user['id'], - password='password') + user_id=user['id'], password='password' + ) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_unscoped_token_by_authenticating_with_unscoped_token(self): auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.v3_create_token(auth_data) self.assertValidUnscopedTokenResponse(r) token_id = r.headers.get('X-Subject-Token') @@ -678,8 +751,8 @@ class TokenAPITests(object): def test_create_unscoped_token_with_user_id(self): auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.v3_create_token(auth_data) self.assertValidUnscopedTokenResponse(r) @@ -687,7 +760,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( username=self.user['name'], user_domain_id=self.domain['id'], - password=self.user['password']) + password=self.user['password'], + ) r = self.v3_create_token(auth_data) self.assertValidUnscopedTokenResponse(r) @@ 
-695,7 +769,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( username=self.user['name'], user_domain_name=self.domain['name'], - password=self.user['password']) + password=self.user['password'], + ) r = self.v3_create_token(auth_data) self.assertValidUnscopedTokenResponse(r) @@ -707,15 +782,13 @@ class TokenAPITests(object): def test_validate_expired_unscoped_token_returns_not_found(self): # NOTE(lbragstad): We set token expiration to 10 seconds so that we can # use the context manager of freezegun without sqlite issues. - self.config_fixture.config(group='token', - expiration=10) + self.config_fixture.config(group='token', expiration=10) time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: unscoped_token = self._get_unscoped_token() frozen_datetime.tick(delta=datetime.timedelta(seconds=15)) self._validate_token( - unscoped_token, - expected_status=http.client.NOT_FOUND + unscoped_token, expected_status=http.client.NOT_FOUND ) def test_revoke_unscoped_token(self): @@ -723,8 +796,9 @@ class TokenAPITests(object): r = self._validate_token(unscoped_token) self.assertValidUnscopedTokenResponse(r) self._revoke_token(unscoped_token) - self._validate_token(unscoped_token, - expected_status=http.client.NOT_FOUND) + self._validate_token( + unscoped_token, expected_status=http.client.NOT_FOUND + ) def test_create_explicit_unscoped_token(self): self._create_project_and_set_as_default_project() @@ -733,14 +807,16 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - unscoped="unscoped") + unscoped="unscoped", + ) r = self.post('/auth/tokens', body=auth_data, noauth=True) self.assertValidUnscopedTokenResponse(r) def test_disabled_users_default_project_result_in_unscoped_token(self): # create a disabled project to work with project = self.create_new_default_project_for_user( - self.user['id'], self.domain_id, enable_project=False) 
+ self.user['id'], self.domain_id, enable_project=False + ) # assign a role to user for the new project PROVIDERS.assignment_api.add_role_to_user_and_project( @@ -749,8 +825,8 @@ class TokenAPITests(object): # attempt to authenticate without requesting a project auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.v3_create_token(auth_data) self.assertValidUnscopedTokenResponse(r) @@ -760,7 +836,8 @@ class TokenAPITests(object): domain = self.assertValidDomainResponse(r, domain_ref) project = self.create_new_default_project_for_user( - self.user['id'], domain['id']) + self.user['id'], domain['id'] + ) # assign a role to user for the new project PROVIDERS.assignment_api.add_role_to_user_and_project( @@ -769,14 +846,15 @@ class TokenAPITests(object): # now disable the project domain body = {'domain': {'enabled': False}} - r = self.patch('/domains/%(domain_id)s' % {'domain_id': domain['id']}, - body=body) + r = self.patch( + '/domains/%(domain_id)s' % {'domain_id': domain['id']}, body=body + ) self.assertValidDomainResponse(r) # attempt to authenticate without requesting a project auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.v3_create_token(auth_data) self.assertValidUnscopedTokenResponse(r) @@ -789,8 +867,7 @@ class TokenAPITests(object): self._set_user_enabled(self.user, enabled=False) # Ensure validating a token for a disabled user fails self._validate_token( - unscoped_token, - expected_status=http.client.NOT_FOUND + unscoped_token, expected_status=http.client.NOT_FOUND ) def test_unscoped_token_is_invalid_after_enabling_disabled_user(self): @@ -802,15 +879,13 @@ class TokenAPITests(object): self._set_user_enabled(self.user, enabled=False) # Ensure validating a token for a disabled user fails 
self._validate_token( - unscoped_token, - expected_status=http.client.NOT_FOUND + unscoped_token, expected_status=http.client.NOT_FOUND ) # Enable the user self._set_user_enabled(self.user) # Ensure validating a token for a re-enabled user fails self._validate_token( - unscoped_token, - expected_status=http.client.NOT_FOUND + unscoped_token, expected_status=http.client.NOT_FOUND ) def test_unscoped_token_is_invalid_after_disabling_user_domain(self): @@ -823,8 +898,7 @@ class TokenAPITests(object): PROVIDERS.resource_api.update_domain(self.domain['id'], self.domain) # Ensure validating a token for a disabled user fails self._validate_token( - unscoped_token, - expected_status=http.client.NOT_FOUND + unscoped_token, expected_status=http.client.NOT_FOUND ) def test_unscoped_token_is_invalid_after_changing_user_password(self): @@ -837,21 +911,20 @@ class TokenAPITests(object): PROVIDERS.identity_api.update_user(self.user['id'], self.user) # Ensure updating user's password revokes existing user's tokens self._validate_token( - unscoped_token, - expected_status=http.client.NOT_FOUND + unscoped_token, expected_status=http.client.NOT_FOUND ) def test_create_system_token_with_user_id(self): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path=path) auth_request_body = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - system=True + system=True, ) response = self.v3_create_token(auth_request_body) @@ -860,7 +933,7 @@ class TokenAPITests(object): def test_create_system_token_with_username(self): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path=path) @@ -868,7 +941,7 @@ class TokenAPITests(object): username=self.user['name'], password=self.user['password'], user_domain_id=self.domain['id'], - system=True + system=True, ) response = 
self.v3_create_token(auth_request_body) @@ -878,17 +951,16 @@ class TokenAPITests(object): auth_request_body = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - system=True + system=True, ) self.v3_create_token( - auth_request_body, - expected_status=http.client.UNAUTHORIZED + auth_request_body, expected_status=http.client.UNAUTHORIZED ) def test_system_token_is_invalid_after_disabling_user(self): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path=path) @@ -896,7 +968,7 @@ class TokenAPITests(object): username=self.user['name'], password=self.user['password'], user_domain_id=self.domain['id'], - system=True + system=True, ) response = self.v3_create_token(auth_request_body) @@ -906,29 +978,22 @@ class TokenAPITests(object): # NOTE(lbragstad): This would make a good test for groups, but # apparently it's not possible to disable a group. - user_ref = { - 'user': { - 'enabled': False - } - } + user_ref = {'user': {'enabled': False}} self.patch( - '/users/%(user_id)s' % {'user_id': self.user['id']}, - body=user_ref + '/users/%(user_id)s' % {'user_id': self.user['id']}, body=user_ref ) self.admin_request( path='/v3/auth/tokens', - headers={'X-Auth-Token': token, - 'X-Subject-Token': token}, + headers={'X-Auth-Token': token, 'X-Subject-Token': token}, method='GET', - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.admin_request( path='/v3/auth/tokens', - headers={'X-Auth-Token': token, - 'X-Subject-Token': token}, + headers={'X-Auth-Token': token, 'X-Subject-Token': token}, method='HEAD', - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) def test_create_system_token_via_system_group_assignment(self): @@ -941,20 +1006,20 @@ class TokenAPITests(object): group = self.post('/groups', body=ref).json_body['group'] path = 
'/system/groups/%(group_id)s/roles/%(role_id)s' % { 'group_id': group['id'], - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path=path) path = '/groups/%(group_id)s/users/%(user_id)s' % { 'group_id': group['id'], - 'user_id': self.user['id'] + 'user_id': self.user['id'], } self.put(path=path) auth_request_body = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - system=True + system=True, ) response = self.v3_create_token(auth_request_body) self.assertValidSystemScopedTokenResponse(response) @@ -964,7 +1029,7 @@ class TokenAPITests(object): def test_revoke_system_token(self): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path=path) @@ -972,7 +1037,7 @@ class TokenAPITests(object): username=self.user['name'], password=self.user['password'], user_domain_id=self.domain['id'], - system=True + system=True, ) response = self.v3_create_token(auth_request_body) @@ -988,7 +1053,7 @@ class TokenAPITests(object): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role['id'] + 'role_id': system_role['id'], } self.put(path=path) @@ -996,7 +1061,7 @@ class TokenAPITests(object): username=self.user['name'], password=self.user['password'], user_domain_id=self.domain['id'], - system=True + system=True, ) response = self.v3_create_token(auth_request_body) @@ -1013,7 +1078,7 @@ class TokenAPITests(object): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role['id'] + 'role_id': system_role['id'], } self.put(path=path) @@ -1021,7 +1086,7 @@ class TokenAPITests(object): username=self.user['name'], password=self.user['password'], user_domain_id=self.domain['id'], - system=True + system=True, ) response = self.v3_create_token(auth_request_body) self.assertValidSystemScopedTokenResponse(response) @@ -1040,7 +1105,7 @@ 
class TokenAPITests(object): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': system_role['id'] + 'role_id': system_role['id'], } self.put(path=path) @@ -1048,7 +1113,7 @@ class TokenAPITests(object): username=self.user['name'], password=self.user['password'], user_domain_id=self.domain['id'], - system=True + system=True, ) response = self.v3_create_token(auth_request_body) self.assertValidSystemScopedTokenResponse(response) @@ -1064,82 +1129,106 @@ class TokenAPITests(object): def test_create_domain_token_scoped_with_domain_id_and_user_id(self): # grant the user a role on the domain path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) + self.domain['id'], + self.user['id'], + self.role['id'], + ) self.put(path=path) auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) r = self.v3_create_token(auth_data) self.assertValidDomainScopedTokenResponse(r) def test_create_domain_token_scoped_with_domain_id_and_username(self): # grant the user a role on the domain path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) + self.domain['id'], + self.user['id'], + self.role['id'], + ) self.put(path=path) auth_data = self.build_authentication_request( username=self.user['name'], user_domain_id=self.domain['id'], password=self.user['password'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) r = self.v3_create_token(auth_data) self.assertValidDomainScopedTokenResponse(r) def test_create_domain_token_scoped_with_domain_id(self): # grant the user a role on the domain path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) + self.domain['id'], + self.user['id'], + self.role['id'], + ) self.put(path=path) auth_data = self.build_authentication_request( 
username=self.user['name'], user_domain_name=self.domain['name'], password=self.user['password'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) r = self.v3_create_token(auth_data) self.assertValidDomainScopedTokenResponse(r) def test_create_domain_token_scoped_with_domain_name(self): # grant the user a role on the domain path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) + self.domain['id'], + self.user['id'], + self.role['id'], + ) self.put(path=path) auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - domain_name=self.domain['name']) + domain_name=self.domain['name'], + ) r = self.v3_create_token(auth_data) self.assertValidDomainScopedTokenResponse(r) def test_create_domain_token_scoped_with_domain_name_and_username(self): # grant the user a role on the domain path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) + self.domain['id'], + self.user['id'], + self.role['id'], + ) self.put(path=path) auth_data = self.build_authentication_request( username=self.user['name'], user_domain_id=self.domain['id'], password=self.user['password'], - domain_name=self.domain['name']) + domain_name=self.domain['name'], + ) r = self.v3_create_token(auth_data) self.assertValidDomainScopedTokenResponse(r) def test_create_domain_token_with_only_domain_name_and_username(self): # grant the user a role on the domain path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) + self.domain['id'], + self.user['id'], + self.role['id'], + ) self.put(path=path) auth_data = self.build_authentication_request( username=self.user['name'], user_domain_name=self.domain['name'], password=self.user['password'], - domain_name=self.domain['name']) + domain_name=self.domain['name'], + ) r = self.v3_create_token(auth_data) self.assertValidDomainScopedTokenResponse(r) @@ -1152,14 +1241,18 @@ class 
TokenAPITests(object): # grant the domain role to group path = '/domains/%s/groups/%s/roles/%s' % ( - self.domain['id'], group['id'], self.role['id']) + self.domain['id'], + group['id'], + self.role['id'], + ) self.put(path=path) # now get a domain-scoped token auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) r = self.v3_create_token(auth_data) self.assertValidDomainScopedTokenResponse(r) @@ -1167,53 +1260,60 @@ class TokenAPITests(object): """Verify authenticate to a domain with unsafe name fails.""" # Start with url name restrictions off, so we can create the unsafe # named domain - self.config_fixture.config(group='resource', - domain_name_url_safe='off') + self.config_fixture.config( + group='resource', domain_name_url_safe='off' + ) unsafe_name = 'i am not / safe' domain = unit.new_domain_ref(name=unsafe_name) PROVIDERS.resource_api.create_domain(domain['id'], domain) role_member = unit.new_role_ref() PROVIDERS.role_api.create_role(role_member['id'], role_member) PROVIDERS.assignment_api.create_grant( - role_member['id'], - user_id=self.user['id'], - domain_id=domain['id']) + role_member['id'], user_id=self.user['id'], domain_id=domain['id'] + ) auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - domain_name=domain['name']) + domain_name=domain['name'], + ) # Since name url restriction is off, we should be able to authenticate self.v3_create_token(auth_data) # Set the name url restriction to new, which should still allow us to # authenticate - self.config_fixture.config(group='resource', - project_name_url_safe='new') + self.config_fixture.config( + group='resource', project_name_url_safe='new' + ) self.v3_create_token(auth_data) # Set the name url restriction to strict and we should fail to # authenticate - self.config_fixture.config(group='resource', - domain_name_url_safe='strict') - 
self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.config_fixture.config( + group='resource', domain_name_url_safe='strict' + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_domain_token_without_grant_returns_unauthorized(self): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) # this fails because the user does not have a role on self.domain - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_validate_domain_scoped_token(self): # Grant user access to domain PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=self.user['id'], - domain_id=self.domain['id'] + self.role['id'], + user_id=self.user['id'], + domain_id=self.domain['id'], ) domain_scoped_token = self._get_domain_scoped_token() r = self._validate_token(domain_scoped_token) @@ -1226,27 +1326,27 @@ class TokenAPITests(object): def test_validate_expired_domain_scoped_token_returns_not_found(self): # Grant user access to domain PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=self.user['id'], - domain_id=self.domain['id'] + self.role['id'], + user_id=self.user['id'], + domain_id=self.domain['id'], ) # NOTE(lbragstad): We set token expiration to 10 seconds so that we can # use the context manager of freezegun without sqlite issues. 
- self.config_fixture.config(group='token', - expiration=10) + self.config_fixture.config(group='token', expiration=10) time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: domain_scoped_token = self._get_domain_scoped_token() frozen_datetime.tick(delta=datetime.timedelta(seconds=15)) self._validate_token( - domain_scoped_token, - expected_status=http.client.NOT_FOUND + domain_scoped_token, expected_status=http.client.NOT_FOUND ) def test_domain_scoped_token_is_invalid_after_disabling_user(self): # Grant user access to domain PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=self.user['id'], - domain_id=self.domain['id'] + self.role['id'], + user_id=self.user['id'], + domain_id=self.domain['id'], ) domain_scoped_token = self._get_domain_scoped_token() # Make sure the token is valid @@ -1256,15 +1356,15 @@ class TokenAPITests(object): self._set_user_enabled(self.user, enabled=False) # Ensure validating a token for a disabled user fails self._validate_token( - domain_scoped_token, - expected_status=http.client.NOT_FOUND + domain_scoped_token, expected_status=http.client.NOT_FOUND ) def test_domain_scoped_token_is_invalid_after_deleting_grant(self): # Grant user access to domain PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=self.user['id'], - domain_id=self.domain['id'] + self.role['id'], + user_id=self.user['id'], + domain_id=self.domain['id'], ) domain_scoped_token = self._get_domain_scoped_token() # Make sure the token is valid @@ -1272,20 +1372,21 @@ class TokenAPITests(object): self.assertValidDomainScopedTokenResponse(r) # Delete access to domain PROVIDERS.assignment_api.delete_grant( - self.role['id'], user_id=self.user['id'], - domain_id=self.domain['id'] + self.role['id'], + user_id=self.user['id'], + domain_id=self.domain['id'], ) # Ensure validating a token for a disabled user fails self._validate_token( - domain_scoped_token, - expected_status=http.client.NOT_FOUND + domain_scoped_token, 
expected_status=http.client.NOT_FOUND ) def test_domain_scoped_token_invalid_after_disabling_domain(self): # Grant user access to domain PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=self.user['id'], - domain_id=self.domain['id'] + self.role['id'], + user_id=self.user['id'], + domain_id=self.domain['id'], ) domain_scoped_token = self._get_domain_scoped_token() # Make sure the token is valid @@ -1296,15 +1397,15 @@ class TokenAPITests(object): PROVIDERS.resource_api.update_domain(self.domain['id'], self.domain) # Ensure validating a token for a disabled domain fails self._validate_token( - domain_scoped_token, - expected_status=http.client.NOT_FOUND + domain_scoped_token, expected_status=http.client.NOT_FOUND ) def test_create_project_scoped_token_with_project_id_and_user_id(self): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.v3_create_token(auth_data) self.assertValidProjectScopedTokenResponse(r) @@ -1316,15 +1417,13 @@ class TokenAPITests(object): def test_validate_expired_project_scoped_token_returns_not_found(self): # NOTE(lbragstad): We set token expiration to 10 seconds so that we can # use the context manager of freezegun without sqlite issues. 
- self.config_fixture.config(group='token', - expiration=10) + self.config_fixture.config(group='token', expiration=10) time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: project_scoped_token = self._get_project_scoped_token() frozen_datetime.tick(delta=datetime.timedelta(seconds=15)) self._validate_token( - project_scoped_token, - expected_status=http.client.NOT_FOUND + project_scoped_token, expected_status=http.client.NOT_FOUND ) def test_revoke_project_scoped_token(self): @@ -1332,16 +1431,17 @@ class TokenAPITests(object): r = self._validate_token(project_scoped_token) self.assertValidProjectScopedTokenResponse(r) self._revoke_token(project_scoped_token) - self._validate_token(project_scoped_token, - expected_status=http.client.NOT_FOUND) + self._validate_token( + project_scoped_token, expected_status=http.client.NOT_FOUND + ) def test_project_scoped_token_is_scoped_to_default_project(self): project = self._create_project_and_set_as_default_project() # attempt to authenticate without requesting a project auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.v3_create_token(auth_data) # ensure the project id in the token matches the default project id @@ -1349,13 +1449,14 @@ class TokenAPITests(object): self.assertEqual(project['id'], r.result['token']['project']['id']) def test_project_scoped_token_no_catalog_is_scoped_to_default_project( - self): + self, + ): project = self._create_project_and_set_as_default_project() # attempt to authenticate without requesting a project or catalog auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True) # ensure the project id in the token matches the default project id @@ -1370,19 
+1471,22 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True) # ensure the project id in the token matches the one we as for self.assertValidProjectScopedTokenResponse(r, require_catalog=False) - self.assertEqual(self.project['id'], - r.result['token']['project']['id']) + self.assertEqual( + self.project['id'], r.result['token']['project']['id'] + ) def test_project_scoped_token_catalog_attributes(self): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.v3_create_token(auth_data) catalog = r.result['token']['catalog'] @@ -1406,11 +1510,13 @@ class TokenAPITests(object): # Create a disabled endpoint disabled_endpoint_ref = copy.copy(self.endpoint) disabled_endpoint_id = uuid.uuid4().hex - disabled_endpoint_ref.update({ - 'id': disabled_endpoint_id, - 'enabled': False, - 'interface': 'internal' - }) + disabled_endpoint_ref.update( + { + 'id': disabled_endpoint_id, + 'enabled': False, + 'interface': 'internal', + } + ) PROVIDERS.catalog_api.create_endpoint( disabled_endpoint_id, disabled_endpoint_ref ) @@ -1418,7 +1524,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) resp = self.v3_create_token(auth_data) # make sure the disabled endpoint id isn't in the list of endpoints @@ -1432,7 +1539,8 @@ class TokenAPITests(object): # service is disabled self.assertTrue(self.endpoint['enabled']) PROVIDERS.catalog_api.update_service( - self.endpoint['service_id'], {'enabled': False}) + self.endpoint['service_id'], {'enabled': False} + ) service = 
PROVIDERS.catalog_api.get_service( self.endpoint['service_id'] ) @@ -1441,7 +1549,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.v3_create_token(auth_data) self.assertEqual([], r.result['token']['catalog']) @@ -1453,16 +1562,19 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=project['id']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + project_id=project['id'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_project_scoped_token_with_username_and_domain_id(self): auth_data = self.build_authentication_request( username=self.user['name'], user_domain_id=self.domain['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.v3_create_token(auth_data) self.assertValidProjectScopedTokenResponse(r) @@ -1471,7 +1583,8 @@ class TokenAPITests(object): username=self.user['name'], user_domain_name=self.domain['name'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) r = self.v3_create_token(auth_data) self.assertValidProjectScopedTokenResponse(r) @@ -1479,45 +1592,53 @@ class TokenAPITests(object): """Verify authenticate to a project with unsafe name fails.""" # Start with url name restrictions off, so we can create the unsafe # named project - self.config_fixture.config(group='resource', - project_name_url_safe='off') + self.config_fixture.config( + group='resource', project_name_url_safe='off' + ) unsafe_name = 'i am not / safe' - project = unit.new_project_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID, - name=unsafe_name) + project = unit.new_project_ref( + domain_id=test_v3.DEFAULT_DOMAIN_ID, name=unsafe_name + ) 
PROVIDERS.resource_api.create_project(project['id'], project) role_member = unit.new_role_ref() PROVIDERS.role_api.create_role(role_member['id'], role_member) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user['id'], project['id'], role_member['id']) + self.user['id'], project['id'], role_member['id'] + ) auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_name=project['name'], - project_domain_id=test_v3.DEFAULT_DOMAIN_ID) + project_domain_id=test_v3.DEFAULT_DOMAIN_ID, + ) # Since name url restriction is off, we should be able to authenticate self.v3_create_token(auth_data) # Set the name url restriction to new, which should still allow us to # authenticate - self.config_fixture.config(group='resource', - project_name_url_safe='new') + self.config_fixture.config( + group='resource', project_name_url_safe='new' + ) self.v3_create_token(auth_data) # Set the name url restriction to strict and we should fail to # authenticate - self.config_fixture.config(group='resource', - project_name_url_safe='strict') - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.config_fixture.config( + group='resource', project_name_url_safe='strict' + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_project_scoped_token_fails_if_domain_name_unsafe(self): """Verify authenticate to a project using unsafe domain name fails.""" # Start with url name restrictions off, so we can create the unsafe # named domain - self.config_fixture.config(group='resource', - domain_name_url_safe='off') + self.config_fixture.config( + group='resource', domain_name_url_safe='off' + ) unsafe_name = 'i am not / safe' domain = unit.new_domain_ref(name=unsafe_name) PROVIDERS.resource_api.create_domain(domain['id'], domain) @@ -1529,48 +1650,56 @@ class TokenAPITests(object): PROVIDERS.assignment_api.create_grant( role_member['id'], user_id=self.user['id'], - 
project_id=project['id']) + project_id=project['id'], + ) # An auth request via project ID, but specifying domain by name auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_name=project['name'], - project_domain_name=domain['name']) + project_domain_name=domain['name'], + ) # Since name url restriction is off, we should be able to authenticate self.v3_create_token(auth_data) # Set the name url restriction to new, which should still allow us to # authenticate - self.config_fixture.config(group='resource', - project_name_url_safe='new') + self.config_fixture.config( + group='resource', project_name_url_safe='new' + ) self.v3_create_token(auth_data) # Set the name url restriction to strict and we should fail to # authenticate - self.config_fixture.config(group='resource', - domain_name_url_safe='strict') - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.config_fixture.config( + group='resource', domain_name_url_safe='strict' + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_project_token_with_same_domain_and_project_name(self): """Authenticate to a project with the same name as its domain.""" domain = unit.new_project_ref(is_domain=True) domain = PROVIDERS.resource_api.create_project(domain['id'], domain) - project = unit.new_project_ref(domain_id=domain['id'], - name=domain['name']) + project = unit.new_project_ref( + domain_id=domain['id'], name=domain['name'] + ) PROVIDERS.resource_api.create_project(project['id'], project) role_member = unit.new_role_ref() PROVIDERS.role_api.create_role(role_member['id'], role_member) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user['id'], project['id'], role_member['id']) + self.user['id'], project['id'], role_member['id'] + ) auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_name=project['name'], - 
project_domain_name=domain['name']) + project_domain_name=domain['name'], + ) r = self.v3_create_token(auth_data) self.assertEqual(project['id'], r.result['token']['project']['id']) @@ -1581,18 +1710,19 @@ class TokenAPITests(object): role_member = unit.new_role_ref() PROVIDERS.role_api.create_role(role_member['id'], role_member) PROVIDERS.assignment_api.create_grant( - role_member['id'], - user_id=self.user['id'], - domain_id=domain['id']) + role_member['id'], user_id=self.user['id'], domain_id=domain['id'] + ) # authentication will fail because the project name is incorrect auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_name=domain['name'], - project_domain_name=domain['name']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + project_domain_name=domain['name'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_project_token_with_disabled_project_domain_fails(self): # create a disabled domain @@ -1605,9 +1735,8 @@ class TokenAPITests(object): # assign some role to self.user for the project in the domain PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user['id'], - project['id'], - self.role_id) + self.user['id'], project['id'], self.role_id + ) # Disable the domain domain['enabled'] = False @@ -1617,27 +1746,33 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=project['id']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + project_id=project['id'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) # user should not be able to auth with project_name & domain auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], project_name=project['name'], - project_domain_id=domain['id']) - 
self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + project_domain_id=domain['id'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_project_token_with_default_domain_as_project(self): # Authenticate to a project with the default domain as project auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=test_v3.DEFAULT_DOMAIN_ID) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + project_id=test_v3.DEFAULT_DOMAIN_ID, + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_project_scoped_token_is_invalid_after_disabling_user(self): project_scoped_token = self._get_project_scoped_token() @@ -1648,8 +1783,7 @@ class TokenAPITests(object): self._set_user_enabled(self.user, enabled=False) # Ensure validating a token for a disabled user fails self._validate_token( - project_scoped_token, - expected_status=http.client.NOT_FOUND + project_scoped_token, expected_status=http.client.NOT_FOUND ) def test_project_scoped_token_invalid_after_changing_user_password(self): @@ -1662,8 +1796,7 @@ class TokenAPITests(object): PROVIDERS.identity_api.update_user(self.user['id'], self.user) # Ensure updating user's password revokes existing tokens self._validate_token( - project_scoped_token, - expected_status=http.client.NOT_FOUND + project_scoped_token, expected_status=http.client.NOT_FOUND ) def test_project_scoped_token_invalid_after_disabling_project(self): @@ -1676,20 +1809,18 @@ class TokenAPITests(object): PROVIDERS.resource_api.update_project(self.project['id'], self.project) # Ensure validating a token for a disabled project fails self._validate_token( - project_scoped_token, - expected_status=http.client.NOT_FOUND + project_scoped_token, expected_status=http.client.NOT_FOUND ) def test_project_scoped_token_is_invalid_after_deleting_grant(self): # disable caching so that 
user grant deletion is not hidden # by token caching - self.config_fixture.config( - group='cache', - enabled=False) + self.config_fixture.config(group='cache', enabled=False) # Grant user access to project PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=self.user['id'], - project_id=self.project['id'] + self.role['id'], + user_id=self.user['id'], + project_id=self.project['id'], ) project_scoped_token = self._get_project_scoped_token() # Make sure the token is valid @@ -1697,24 +1828,25 @@ class TokenAPITests(object): self.assertValidProjectScopedTokenResponse(r) # Delete access to project PROVIDERS.assignment_api.delete_grant( - self.role['id'], user_id=self.user['id'], - project_id=self.project['id'] + self.role['id'], + user_id=self.user['id'], + project_id=self.project['id'], ) # Ensure the token has been revoked self._validate_token( - project_scoped_token, - expected_status=http.client.NOT_FOUND + project_scoped_token, expected_status=http.client.NOT_FOUND ) def test_no_access_to_default_project_result_in_unscoped_token(self): # create a disabled project to work with - self.create_new_default_project_for_user(self.user['id'], - self.domain_id) + self.create_new_default_project_for_user( + self.user['id'], self.domain_id + ) # attempt to authenticate without requesting a project auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) + user_id=self.user['id'], password=self.user['password'] + ) r = self.v3_create_token(auth_data) self.assertValidUnscopedTokenResponse(r) @@ -1732,8 +1864,7 @@ class TokenAPITests(object): def test_validate_expired_trust_scoped_token_returns_not_found(self): # NOTE(lbragstad): We set token expiration to 10 seconds so that we can # use the context manager of freezegun without sqlite issues. 
- self.config_fixture.config(group='token', - expiration=10) + self.config_fixture.config(group='token', expiration=10) time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: trustee_user, trust = self._create_trust() @@ -1742,8 +1873,7 @@ class TokenAPITests(object): ) frozen_datetime.tick(delta=datetime.timedelta(seconds=15)) self._validate_token( - trust_scoped_token, - expected_status=http.client.NOT_FOUND + trust_scoped_token, expected_status=http.client.NOT_FOUND ) def test_validate_a_trust_scoped_token_impersonated(self): @@ -1760,8 +1890,9 @@ class TokenAPITests(object): r = self._validate_token(trust_scoped_token) self.assertValidProjectScopedTokenResponse(r) self._revoke_token(trust_scoped_token) - self._validate_token(trust_scoped_token, - expected_status=http.client.NOT_FOUND) + self._validate_token( + trust_scoped_token, expected_status=http.client.NOT_FOUND + ) def test_trust_scoped_token_is_invalid_after_disabling_trustee(self): trustee_user, trust = self._create_trust() @@ -1777,8 +1908,7 @@ class TokenAPITests(object): ) # Ensure validating a token for a disabled user fails self._validate_token( - trust_scoped_token, - expected_status=http.client.NOT_FOUND + trust_scoped_token, expected_status=http.client.NOT_FOUND ) def test_trust_token_is_invalid_when_trustee_domain_disabled(self): @@ -1788,8 +1918,9 @@ class TokenAPITests(object): new_domain_ref['id'], new_domain_ref ) - trustee_ref = unit.create_user(PROVIDERS.identity_api, - domain_id=new_domain_ref['id']) + trustee_ref = unit.create_user( + PROVIDERS.identity_api, domain_id=new_domain_ref['id'] + ) new_project_ref = unit.new_project_ref(domain_id=self.domain_id) PROVIDERS.resource_api.create_project( @@ -1800,14 +1931,17 @@ class TokenAPITests(object): PROVIDERS.assignment_api.create_grant( self.role['id'], user_id=self.user_id, - project_id=new_project_ref['id']) + project_id=new_project_ref['id'], + ) - trust_ref = 
unit.new_trust_ref(trustor_user_id=self.user_id, - trustee_user_id=trustee_ref['id'], - expires=dict(minutes=1), - project_id=new_project_ref['id'], - impersonation=True, - role_ids=[self.role['id']]) + trust_ref = unit.new_trust_ref( + trustor_user_id=self.user_id, + trustee_user_id=trustee_ref['id'], + expires=dict(minutes=1), + project_id=new_project_ref['id'], + impersonation=True, + role_ids=[self.role['id']], + ) resp = self.post('/OS-TRUST/trusts', body={'trust': trust_ref}) self.assertValidTrustResponse(resp, trust_ref) @@ -1817,7 +1951,8 @@ class TokenAPITests(object): trust_auth_data = self.build_authentication_request( user_id=trustee_ref['id'], password=trustee_ref['password'], - trust_id=trust_id) + trust_id=trust_id, + ) trust_scoped_token = self.get_requested_token(trust_auth_data) # ensure the project-scoped token from the trust is valid @@ -1826,11 +1961,13 @@ class TokenAPITests(object): disable_body = {'domain': {'enabled': False}} self.patch( '/domains/%(domain_id)s' % {'domain_id': new_domain_ref['id']}, - body=disable_body) + body=disable_body, + ) # ensure the project-scoped token from the trust is invalid - self._validate_token(trust_scoped_token, - expected_status=http.client.NOT_FOUND) + self._validate_token( + trust_scoped_token, expected_status=http.client.NOT_FOUND + ) def test_trust_scoped_token_invalid_after_changing_trustee_password(self): trustee_user, trust = self._create_trust() @@ -1845,8 +1982,7 @@ class TokenAPITests(object): ) # Ensure updating trustee's password revokes existing tokens self._validate_token( - trust_scoped_token, - expected_status=http.client.NOT_FOUND + trust_scoped_token, expected_status=http.client.NOT_FOUND ) def test_trust_scoped_token_is_invalid_after_disabling_trustor(self): @@ -1861,8 +1997,7 @@ class TokenAPITests(object): PROVIDERS.identity_api.update_user(self.user['id'], trustor_update_ref) # Ensure validating a token for a disabled user fails self._validate_token( - trust_scoped_token, - 
expected_status=http.client.NOT_FOUND + trust_scoped_token, expected_status=http.client.NOT_FOUND ) def test_trust_scoped_token_invalid_after_changing_trustor_password(self): @@ -1877,8 +2012,7 @@ class TokenAPITests(object): PROVIDERS.identity_api.update_user(self.user['id'], trustor_update_ref) # Ensure updating trustor's password revokes existing user's tokens self._validate_token( - trust_scoped_token, - expected_status=http.client.NOT_FOUND + trust_scoped_token, expected_status=http.client.NOT_FOUND ) def test_trust_scoped_token_invalid_after_disabled_trustor_domain(self): @@ -1896,8 +2030,7 @@ class TokenAPITests(object): PROVIDERS.identity_api.update_user(self.user['id'], trustor_update_ref) # Ensure updating trustor's password revokes existing user's tokens self._validate_token( - trust_scoped_token, - expected_status=http.client.NOT_FOUND + trust_scoped_token, expected_status=http.client.NOT_FOUND ) def test_default_fixture_scope_token(self): @@ -1907,52 +2040,64 @@ class TokenAPITests(object): expires = self.v3_token_data['token']['expires_at'] # rescope the token - r = self.v3_create_token(self.build_authentication_request( - token=self.v3_token, - project_id=self.project_id)) + r = self.v3_create_token( + self.build_authentication_request( + token=self.v3_token, project_id=self.project_id + ) + ) self.assertValidProjectScopedTokenResponse(r) # ensure token expiration stayed the same self.assertTimestampEqual(expires, r.result['token']['expires_at']) def test_check_token(self): - self.head('/auth/tokens', headers=self.headers, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers=self.headers, + expected_status=http.client.OK, + ) def test_validate_token(self): r = self.get('/auth/tokens', headers=self.headers) self.assertValidUnscopedTokenResponse(r) def test_validate_missing_subject_token(self): - self.get('/auth/tokens', - expected_status=http.client.NOT_FOUND) + self.get('/auth/tokens', expected_status=http.client.NOT_FOUND) def 
test_validate_missing_auth_token(self): self.admin_request( method='GET', path='/v3/projects', token=None, - expected_status=http.client.UNAUTHORIZED) + expected_status=http.client.UNAUTHORIZED, + ) def test_validate_token_nocatalog(self): - v3_token = self.get_requested_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) + v3_token = self.get_requested_token( + self.build_authentication_request( + user_id=self.user['id'], + password=self.user['password'], + project_id=self.project['id'], + ) + ) r = self.get( - '/auth/tokens?nocatalog', - headers={'X-Subject-Token': v3_token}) + '/auth/tokens?nocatalog', headers={'X-Subject-Token': v3_token} + ) self.assertValidProjectScopedTokenResponse(r, require_catalog=False) def test_is_admin_token_by_ids(self): self.config_fixture.config( group='resource', admin_project_domain_name=self.domain['name'], - admin_project_name=self.project['name']) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) + admin_project_name=self.project['name'], + ) + r = self.v3_create_token( + self.build_authentication_request( + user_id=self.user['id'], + password=self.user['password'], + project_id=self.project['id'], + ) + ) self.assertValidProjectScopedTokenResponse(r, is_admin_project=True) v3_token = r.headers.get('X-Subject-Token') r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) @@ -1962,12 +2107,16 @@ class TokenAPITests(object): self.config_fixture.config( group='resource', admin_project_domain_name=self.domain['name'], - admin_project_name=self.project['name']) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_domain_name=self.domain['name'], - project_name=self.project['name'])) + admin_project_name=self.project['name'], + ) + r = 
self.v3_create_token( + self.build_authentication_request( + user_id=self.user['id'], + password=self.user['password'], + project_domain_name=self.domain['name'], + project_name=self.project['name'], + ) + ) self.assertValidProjectScopedTokenResponse(r, is_admin_project=True) v3_token = r.headers.get('X-Subject-Token') r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) @@ -1977,11 +2126,15 @@ class TokenAPITests(object): self.config_fixture.config( group='resource', admin_project_domain_name=self.domain['name'], - admin_project_name=uuid.uuid4().hex) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) + admin_project_name=uuid.uuid4().hex, + ) + r = self.v3_create_token( + self.build_authentication_request( + user_id=self.user['id'], + password=self.user['password'], + project_id=self.project['id'], + ) + ) self.assertValidProjectScopedTokenResponse(r, is_admin_project=False) v3_token = r.headers.get('X-Subject-Token') r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) @@ -1991,11 +2144,15 @@ class TokenAPITests(object): self.config_fixture.config( group='resource', admin_project_domain_name=uuid.uuid4().hex, - admin_project_name=self.project['name']) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) + admin_project_name=self.project['name'], + ) + r = self.v3_create_token( + self.build_authentication_request( + user_id=self.user['id'], + password=self.user['password'], + project_id=self.project['id'], + ) + ) self.assertValidProjectScopedTokenResponse(r, is_admin_project=False) v3_token = r.headers.get('X-Subject-Token') r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) @@ -2003,12 +2160,15 @@ class TokenAPITests(object): def test_only_admin_project_set_acts_as_non_admin(self): self.config_fixture.config( 
- group='resource', - admin_project_name=self.project['name']) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) + group='resource', admin_project_name=self.project['name'] + ) + r = self.v3_create_token( + self.build_authentication_request( + user_id=self.user['id'], + password=self.user['password'], + project_id=self.project['id'], + ) + ) self.assertValidProjectScopedTokenResponse(r, is_admin_project=None) v3_token = r.headers.get('X-Subject-Token') r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) @@ -2069,8 +2229,9 @@ class TokenAPITests(object): def test_create_implied_role_shows_in_v3_domain_token(self): self.config_fixture.config(group='token') PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=self.user['id'], - domain_id=self.domain['id'] + self.role['id'], + user_id=self.user['id'], + domain_id=self.domain['id'], ) self._create_implied_role_shows_in_v3_token(True) @@ -2224,8 +2385,9 @@ class TokenAPITests(object): new_role = self._create_role(domain_id=self.domain_id) PROVIDERS.assignment_api.create_grant( - new_role['id'], user_id=self.user['id'], - project_id=self.project['id'] + new_role['id'], + user_id=self.user['id'], + project_id=self.project['id'], ) implied = self._create_implied_role(new_role['id']) @@ -2240,21 +2402,27 @@ class TokenAPITests(object): def test_remove_all_roles_from_scope_result_in_404(self): # create a new user - new_user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain['id']) + new_user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain['id'] + ) # give the new user a role on a project path = '/projects/%s/users/%s/roles/%s' % ( - self.project['id'], new_user['id'], self.role['id']) + self.project['id'], + new_user['id'], + self.role['id'], + ) self.put(path=path) # authenticate as the new user and get a project-scoped token auth_data = 
self.build_authentication_request( user_id=new_user['id'], password=new_user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) subject_token_id = self.v3_create_token(auth_data).headers.get( - 'X-Subject-Token') + 'X-Subject-Token' + ) # make sure the project-scoped token is valid headers = {'X-Subject-Token': subject_token_id} @@ -2263,50 +2431,64 @@ class TokenAPITests(object): # remove the roles from the user for the given scope path = '/projects/%s/users/%s/roles/%s' % ( - self.project['id'], new_user['id'], self.role['id']) + self.project['id'], + new_user['id'], + self.role['id'], + ) self.delete(path=path) # token validation should now result in 404 - self.get('/auth/tokens', headers=headers, - expected_status=http.client.NOT_FOUND) + self.get( + '/auth/tokens', + headers=headers, + expected_status=http.client.NOT_FOUND, + ) def test_create_token_with_nonexistant_user_id_fails(self): auth_data = self.build_authentication_request( - user_id=uuid.uuid4().hex, - password=self.user['password']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + user_id=uuid.uuid4().hex, password=self.user['password'] + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_token_with_nonexistant_username_fails(self): auth_data = self.build_authentication_request( username=uuid.uuid4().hex, user_domain_id=self.domain['id'], - password=self.user['password']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + password=self.user['password'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_token_with_nonexistant_domain_id_fails(self): auth_data = self.build_authentication_request( username=self.user['name'], user_domain_id=uuid.uuid4().hex, - password=self.user['password']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + password=self.user['password'], + ) + 
self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_token_with_nonexistant_domain_name_fails(self): auth_data = self.build_authentication_request( username=self.user['name'], user_domain_name=uuid.uuid4().hex, - password=self.user['password']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + password=self.user['password'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_token_with_wrong_password_fails(self): auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=uuid.uuid4().hex) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + user_id=self.user['id'], password=uuid.uuid4().hex + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_user_and_group_roles_scoped_token(self): """Test correct roles are returned in scoped token. @@ -2349,12 +2531,8 @@ class TokenAPITests(object): group2 = unit.new_group_ref(domain_id=domainA['id']) group2 = PROVIDERS.identity_api.create_group(group2) - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group1['id'] - ) - PROVIDERS.identity_api.add_user_to_group( - user2['id'], group2['id'] - ) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id']) + PROVIDERS.identity_api.add_user_to_group(user2['id'], group2['id']) # Now create all the roles and assign them role_list = [] @@ -2379,15 +2557,17 @@ class TokenAPITests(object): role_list[4]['id'], group_id=group1['id'], domain_id=domainA['id'] ) PROVIDERS.assignment_api.create_grant( - role_list[5]['id'], group_id=group1['id'], - project_id=projectA['id'] + role_list[5]['id'], + group_id=group1['id'], + project_id=projectA['id'], ) PROVIDERS.assignment_api.create_grant( role_list[6]['id'], group_id=group2['id'], domain_id=domainA['id'] ) PROVIDERS.assignment_api.create_grant( - role_list[7]['id'], group_id=group2['id'], - 
project_id=projectA['id'] + role_list[7]['id'], + group_id=group2['id'], + project_id=projectA['id'], ) # First, get a project scoped token - which should @@ -2396,7 +2576,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=user1['id'], password=user1['password'], - project_id=projectA['id']) + project_id=projectA['id'], + ) r = self.v3_create_token(auth_data) token = self.assertValidScopedTokenResponse(r) roles_ids = [] @@ -2410,7 +2591,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_id=user1['id'], password=user1['password'], - domain_id=domainA['id']) + domain_id=domainA['id'], + ) r = self.v3_create_token(auth_data) token = self.assertValidScopedTokenResponse(r) roles_ids = [] @@ -2423,13 +2605,12 @@ class TokenAPITests(object): # Finally, add user1 to the 2nd group, and get a new # scoped token - the extra role should now be included # by virtue of the 2nd group - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group2['id'] - ) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group2['id']) auth_data = self.build_authentication_request( user_id=user1['id'], password=user1['password'], - project_id=projectA['id']) + project_id=projectA['id'], + ) r = self.v3_create_token(auth_data) token = self.assertValidScopedTokenResponse(r) roles_ids = [] @@ -2447,8 +2628,9 @@ class TokenAPITests(object): PROVIDERS.resource_api.create_domain(domain1['id'], domain1) project1 = unit.new_project_ref(domain_id=domain1['id']) PROVIDERS.resource_api.create_project(project1['id'], project1) - user_foo = unit.create_user(PROVIDERS.identity_api, - domain_id=test_v3.DEFAULT_DOMAIN_ID) + user_foo = unit.create_user( + PROVIDERS.identity_api, domain_id=test_v3.DEFAULT_DOMAIN_ID + ) role_member = unit.new_role_ref() PROVIDERS.role_api.create_role(role_member['id'], role_member) role_admin = unit.new_role_ref() @@ -2469,19 +2651,23 @@ class TokenAPITests(object): 
PROVIDERS.assignment_api.create_grant( user_id=user_foo['id'], project_id=project1['id'], - role_id=role_member['id']) + role_id=role_member['id'], + ) PROVIDERS.assignment_api.create_grant( group_id=new_group['id'], project_id=project1['id'], - role_id=role_admin['id']) + role_id=role_admin['id'], + ) PROVIDERS.assignment_api.create_grant( user_id=user_foo['id'], domain_id=domain1['id'], - role_id=role_foo_domain1['id']) + role_id=role_foo_domain1['id'], + ) PROVIDERS.assignment_api.create_grant( group_id=new_group['id'], domain_id=domain1['id'], - role_id=role_group_domain1['id']) + role_id=role_group_domain1['id'], + ) # Get a scoped token for the project auth_data = self.build_authentication_request( @@ -2489,7 +2675,8 @@ class TokenAPITests(object): user_domain_id=test_v3.DEFAULT_DOMAIN_ID, password=user_foo['password'], project_name=project1['name'], - project_domain_id=domain1['id']) + project_domain_id=domain1['id'], + ) r = self.v3_create_token(auth_data) scoped_token = self.assertValidScopedTokenResponse(r) @@ -2517,15 +2704,22 @@ class TokenAPITests(object): super(auth.core.AuthContext, self).__init__(*args, **kwargs) auth_contexts.append(self) - self.useFixture(fixtures.MockPatch( - 'keystone.auth.core.AuthContext.__init__', new_init)) + self.useFixture( + fixtures.MockPatch( + 'keystone.auth.core.AuthContext.__init__', new_init + ) + ) with app.test_client() as c: - c.environ_base.update(self.build_external_auth_environ( - self.default_domain_user['name'])) + c.environ_base.update( + self.build_external_auth_environ( + self.default_domain_user['name'] + ) + ) auth_req = self.build_authentication_request() c.post('/v3/auth/tokens', json=auth_req) - self.assertEqual(self.default_domain_user['id'], - auth_contexts[-1]['user_id']) + self.assertEqual( + self.default_domain_user['id'], auth_contexts[-1]['user_id'] + ) # Now test to make sure the user name can, itself, contain the # '@' character. 
@@ -2534,23 +2728,30 @@ class TokenAPITests(object): self.default_domain_user['id'], user ) with app.test_client() as c: - c.environ_base.update(self.build_external_auth_environ( - user['name'])) + c.environ_base.update( + self.build_external_auth_environ(user['name']) + ) auth_req = self.build_authentication_request() c.post('/v3/auth/tokens', json=auth_req) - self.assertEqual(self.default_domain_user['id'], - auth_contexts[-1]['user_id']) - self.assertEqual(self.default_domain_user['id'], - auth_contexts[-1]['user_id']) + self.assertEqual( + self.default_domain_user['id'], auth_contexts[-1]['user_id'] + ) + self.assertEqual( + self.default_domain_user['id'], auth_contexts[-1]['user_id'] + ) def test_remote_user_no_domain(self): app = self.loadapp() with app.test_client() as c: - c.environ_base.update(self.build_external_auth_environ( - self.user['name'])) + c.environ_base.update( + self.build_external_auth_environ(self.user['name']) + ) auth_request = self.build_authentication_request() - c.post('/v3/auth/tokens', json=auth_request, - expected_status_code=http.client.UNAUTHORIZED) + c.post( + '/v3/auth/tokens', + json=auth_request, + expected_status_code=http.client.UNAUTHORIZED, + ) def test_remote_user_and_password(self): # both REMOTE_USER and password methods must pass. 
@@ -2560,7 +2761,8 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_domain_id=self.default_domain_user['domain_id'], username=self.default_domain_user['name'], - password=self.default_domain_user['password']) + password=self.default_domain_user['password'], + ) c.post('/v3/auth/tokens', json=auth_data) def test_remote_user_and_explicit_external(self): @@ -2569,13 +2771,17 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_domain_id=self.domain['id'], username=self.user['name'], - password=self.user['password']) + password=self.user['password'], + ) auth_data['auth']['identity']['methods'] = ["password", "external"] auth_data['auth']['identity']['external'] = {} app = self.loadapp() with app.test_client() as c: - c.post('/v3/auth/tokens', json=auth_data, - expected_status_code=http.client.UNAUTHORIZED) + c.post( + '/v3/auth/tokens', + json=auth_data, + expected_status_code=http.client.UNAUTHORIZED, + ) def test_remote_user_bad_password(self): # both REMOTE_USER and password methods must pass. 
@@ -2583,15 +2789,19 @@ class TokenAPITests(object): auth_data = self.build_authentication_request( user_domain_id=self.domain['id'], username=self.user['name'], - password='badpassword') + password='badpassword', + ) with app.test_client() as c: - c.post('/v3/auth/tokens', json=auth_data, - expected_status_code=http.client.UNAUTHORIZED) + c.post( + '/v3/auth/tokens', + json=auth_data, + expected_status_code=http.client.UNAUTHORIZED, + ) def test_fetch_expired_allow_expired(self): - self.config_fixture.config(group='token', - expiration=10, - allow_expired_window=20) + self.config_fixture.config( + group='token', expiration=10, allow_expired_window=20 + ) time = datetime.datetime.utcnow() with freezegun.freeze_time(time) as frozen_datetime: token = self._get_project_scoped_token() @@ -2610,9 +2820,11 @@ class TokenAPITests(object): # and then if we're passed the allow_expired_window it will fail # anyway raises expired when now > expiration + window frozen_datetime.tick(delta=datetime.timedelta(seconds=22)) - self._validate_token(token, - allow_expired=True, - expected_status=http.client.NOT_FOUND) + self._validate_token( + token, + allow_expired=True, + expected_status=http.client.NOT_FOUND, + ) def test_system_scoped_token_works_with_domain_specific_drivers(self): self.config_fixture.config( @@ -2631,9 +2843,9 @@ class TokenAPITests(object): c.get('/v3/users', headers=headers) def test_fetch_expired_allow_expired_in_expired_window(self): - self.config_fixture.config(group='token', - expiration=10, - allow_expired_window=20) + self.config_fixture.config( + group='token', expiration=10, allow_expired_window=20 + ) time = datetime.datetime.utcnow() with freezegun.freeze_time(time): token = self._get_project_scoped_token() @@ -2656,26 +2868,28 @@ class TokenAPITests(object): PROVIDERS.resource_api.create_project( new_project_ref['id'], new_project_ref ) - new_user = unit.create_user(PROVIDERS.identity_api, - domain_id=new_domain_ref['id'], - 
project_id=new_project_ref['id']) + new_user = unit.create_user( + PROVIDERS.identity_api, + domain_id=new_domain_ref['id'], + project_id=new_project_ref['id'], + ) PROVIDERS.assignment_api.create_grant( self.role['id'], user_id=new_user['id'], - project_id=new_project_ref['id']) + project_id=new_project_ref['id'], + ) return new_user, new_domain_ref, new_project_ref - def _create_certificates(self, - root_dn=None, - server_dn=None, - client_dn=None): + def _create_certificates( + self, root_dn=None, server_dn=None, client_dn=None + ): root_subj = unit.create_dn( country_name='jp', state_or_province_name='kanagawa', locality_name='kawasaki', organization_name='fujitsu', organizational_unit_name='test', - common_name='root' + common_name='root', ) if root_dn: root_subj = unit.update_dn(root_subj, root_dn) @@ -2687,43 +2901,41 @@ class TokenAPITests(object): locality_name='kawasaki', organization_name='fujitsu', organizational_unit_name='test', - common_name='keystone.local' + common_name='keystone.local', ) if server_dn: keystone_subj = unit.update_dn(keystone_subj, server_dn) ks_cert, ks_key = unit.create_certificate( - keystone_subj, ca=root_cert, ca_key=root_key) + keystone_subj, ca=root_cert, ca_key=root_key + ) client_subj = unit.create_dn( country_name='jp', state_or_province_name='kanagawa', locality_name='kawasaki', organization_name='fujitsu', organizational_unit_name='test', - common_name='client' + common_name='client', ) if client_dn: client_subj = unit.update_dn(client_subj, client_dn) client_cert, client_key = unit.create_certificate( - client_subj, ca=root_cert, ca_key=root_key) + client_subj, ca=root_cert, ca_key=root_key + ) return root_cert, root_key, ks_cert, ks_key, client_cert, client_key def _get_cert_content(self, cert): return cert.public_bytes(Encoding.PEM).decode('ascii') - def _get_oauth2_access_token(self, client_id, client_cert_content, - expected_status=http.client.OK): + def _get_oauth2_access_token( + self, client_id, 
client_cert_content, expected_status=http.client.OK + ): headers = { 'Content-Type': 'application/x-www-form-urlencoded', } - data = { - 'grant_type': 'client_credentials', - 'client_id': client_id - } - extra_environ = { - 'SSL_CLIENT_CERT': client_cert_content - } + data = {'grant_type': 'client_credentials', 'client_id': client_id} + extra_environ = {'SSL_CLIENT_CERT': client_cert_content} data = parse.urlencode(data).encode() resp = self.post( '/OS-OAUTH2/token', @@ -2732,7 +2944,8 @@ class TokenAPITests(object): convert=False, body=data, environ=extra_environ, - expected_status=expected_status) + expected_status=expected_status, + ) return resp def _create_mapping(self): @@ -2746,38 +2959,23 @@ class TokenAPITests(object): 'name': '{0}', 'id': '{1}', 'email': '{2}', - 'domain': { - 'name': '{3}', - 'id': '{4}' - } + 'domain': {'name': '{3}', 'id': '{4}'}, } } ], 'remote': [ - { - 'type': 'SSL_CLIENT_SUBJECT_DN_CN' - }, - { - 'type': 'SSL_CLIENT_SUBJECT_DN_UID' - }, - { - 'type': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS' - }, - { - 'type': 'SSL_CLIENT_SUBJECT_DN_O' - }, - { - 'type': 'SSL_CLIENT_SUBJECT_DN_DC' - }, + {'type': 'SSL_CLIENT_SUBJECT_DN_CN'}, + {'type': 'SSL_CLIENT_SUBJECT_DN_UID'}, + {'type': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS'}, + {'type': 'SSL_CLIENT_SUBJECT_DN_O'}, + {'type': 'SSL_CLIENT_SUBJECT_DN_DC'}, { 'type': 'SSL_CLIENT_ISSUER_DN_CN', - 'any_one_of': [ - 'root' - ] - } - ] + 'any_one_of': ['root'], + }, + ], } - ] + ], } PROVIDERS.federation_api.create_mapping(mapping['id'], mapping) @@ -2788,16 +2986,14 @@ class TokenAPITests(object): user, user_domain, _ = self._create_project_user() *_, client_cert, _ = self._create_certificates( - root_dn=unit.create_dn( - common_name='root' - ), + root_dn=unit.create_dn(common_name='root'), client_dn=unit.create_dn( common_name=user['name'], user_id=user['id'], email_address=user['email'], organization_name=user_domain['name'], - domain_component=user_domain['id'] - ) + domain_component=user_domain['id'], 
+ ), ) cert_content = self._get_cert_content(client_cert) @@ -2814,17 +3010,20 @@ class TokenAPITests(object): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] + 'X-Auth-Token': json_resp['access_token'], }, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) CONF.token.cache_on_issue = cache_on_issue CONF.token.caching = caching @@ -2842,13 +3041,15 @@ class TokenDataTests(object): PROVIDERS.assignment_api.create_grant( self.role['id'], user_id=self.default_domain_user['id'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) domain_scoped_token = self.get_requested_token( self.build_authentication_request( user_id=self.default_domain_user['id'], password=self.default_domain_user['password'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) ) self.headers['X-Subject-Token'] = domain_scoped_token r = self.get('/auth/tokens', headers=self.headers) @@ -2860,7 +3061,8 @@ class TokenDataTests(object): self.build_authentication_request( user_id=self.default_domain_user['id'], password=self.default_domain_user['password'], - project_id=self.default_domain_project['id']) + project_id=self.default_domain_project['id'], + ) ) self.headers['X-Subject-Token'] = project_scoped_token r = self.get('/auth/tokens', headers=self.headers) @@ -2872,31 +3074,37 @@ class TokenDataTests(object): # populate the response result with some extra data 
r.result['token'][u'extra'] = str(uuid.uuid4().hex) - self.assertRaises(exception.SchemaValidationError, - self.assertValidUnscopedTokenResponse, - r) + self.assertRaises( + exception.SchemaValidationError, + self.assertValidUnscopedTokenResponse, + r, + ) def test_extra_data_in_domain_scoped_token_fails_validation(self): # ensure domain scoped token response contains the appropriate data PROVIDERS.assignment_api.create_grant( self.role['id'], user_id=self.default_domain_user['id'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) domain_scoped_token = self.get_requested_token( self.build_authentication_request( user_id=self.default_domain_user['id'], password=self.default_domain_user['password'], - domain_id=self.domain['id']) + domain_id=self.domain['id'], + ) ) self.headers['X-Subject-Token'] = domain_scoped_token r = self.get('/auth/tokens', headers=self.headers) # populate the response result with some extra data r.result['token'][u'extra'] = str(uuid.uuid4().hex) - self.assertRaises(exception.SchemaValidationError, - self.assertValidDomainScopedTokenResponse, - r) + self.assertRaises( + exception.SchemaValidationError, + self.assertValidDomainScopedTokenResponse, + r, + ) def test_extra_data_in_project_scoped_token_fails_validation(self): # ensure project scoped token responses contains the appropriate data @@ -2904,67 +3112,77 @@ class TokenDataTests(object): self.build_authentication_request( user_id=self.default_domain_user['id'], password=self.default_domain_user['password'], - project_id=self.default_domain_project['id']) + project_id=self.default_domain_project['id'], + ) ) self.headers['X-Subject-Token'] = project_scoped_token resp = self.get('/auth/tokens', headers=self.headers) # populate the response result with some extra data resp.result['token'][u'extra'] = str(uuid.uuid4().hex) - self.assertRaises(exception.SchemaValidationError, - self.assertValidProjectScopedTokenResponse, - resp) + self.assertRaises( + 
exception.SchemaValidationError, + self.assertValidProjectScopedTokenResponse, + resp, + ) class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase): def config_overrides(self): super(AllowRescopeScopedTokenDisabledTests, self).config_overrides() self.config_fixture.config( - group='token', - allow_rescope_scoped_token=False) + group='token', allow_rescope_scoped_token=False + ) def test_rescoping_v3_to_v3_disabled(self): self.v3_create_token( self.build_authentication_request( - token=self.get_scoped_token(), - project_id=self.project_id), - expected_status=http.client.FORBIDDEN) + token=self.get_scoped_token(), project_id=self.project_id + ), + expected_status=http.client.FORBIDDEN, + ) def test_rescoped_domain_token_disabled(self): self.domainA = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(self.domainA['id'], self.domainA) PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=self.user['id'], - domain_id=self.domainA['id'] + self.role['id'], + user_id=self.user['id'], + domain_id=self.domainA['id'], ) unscoped_token = self.get_requested_token( self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'])) + user_id=self.user['id'], password=self.user['password'] + ) + ) # Get a domain-scoped token from the unscoped token domain_scoped_token = self.get_requested_token( self.build_authentication_request( - token=unscoped_token, - domain_id=self.domainA['id'])) + token=unscoped_token, domain_id=self.domainA['id'] + ) + ) self.v3_create_token( self.build_authentication_request( - token=domain_scoped_token, - project_id=self.project_id), - expected_status=http.client.FORBIDDEN) + token=domain_scoped_token, project_id=self.project_id + ), + expected_status=http.client.FORBIDDEN, + ) -class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests, - TokenDataTests): +class TestFernetTokenAPIs( + test_v3.RestfulTestCase, TokenAPITests, TokenDataTests +): def config_overrides(self): 
super(TestFernetTokenAPIs, self).config_overrides() - self.config_fixture.config(group='token', provider='fernet', - cache_on_issue=True) + self.config_fixture.config( + group='token', provider='fernet', cache_on_issue=True + ) self.useFixture( ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) @@ -2979,26 +3197,36 @@ class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests, def test_validate_tampered_unscoped_token_fails(self): unscoped_token = self._get_unscoped_token() - tampered_token = (unscoped_token[:50] + uuid.uuid4().hex + - unscoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http.client.NOT_FOUND) + tampered_token = ( + unscoped_token[:50] + uuid.uuid4().hex + unscoped_token[50 + 32 :] + ) + self._validate_token( + tampered_token, expected_status=http.client.NOT_FOUND + ) def test_validate_tampered_project_scoped_token_fails(self): project_scoped_token = self._get_project_scoped_token() - tampered_token = (project_scoped_token[:50] + uuid.uuid4().hex + - project_scoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http.client.NOT_FOUND) + tampered_token = ( + project_scoped_token[:50] + + uuid.uuid4().hex + + project_scoped_token[50 + 32 :] + ) + self._validate_token( + tampered_token, expected_status=http.client.NOT_FOUND + ) def test_validate_tampered_trust_scoped_token_fails(self): trustee_user, trust = self._create_trust() trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) # Get a trust scoped token - tampered_token = (trust_scoped_token[:50] + uuid.uuid4().hex + - trust_scoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http.client.NOT_FOUND) + tampered_token = ( + trust_scoped_token[:50] + + uuid.uuid4().hex + + trust_scoped_token[50 + 32 :] + ) + self._validate_token( + tampered_token, expected_status=http.client.NOT_FOUND + ) def 
test_trust_scoped_token_is_invalid_after_disabling_trustor(self): # NOTE(amakarov): have to override this test for non-persistent tokens @@ -3014,16 +3242,16 @@ class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests, PROVIDERS.identity_api.update_user(self.user['id'], trustor_update_ref) # Ensure validating a token for a disabled user fails self._validate_token( - trust_scoped_token, - expected_status=http.client.FORBIDDEN + trust_scoped_token, expected_status=http.client.FORBIDDEN ) class TestJWSTokenAPIs(test_v3.RestfulTestCase, TokenAPITests, TokenDataTests): def config_overrides(self): super(TestJWSTokenAPIs, self).config_overrides() - self.config_fixture.config(group='token', provider='jws', - cache_on_issue=True) + self.config_fixture.config( + group='token', provider='jws', cache_on_issue=True + ) self.useFixture(ksfixtures.JWSKeyRepository(self.config_fixture)) def setUp(self): @@ -3037,26 +3265,36 @@ class TestJWSTokenAPIs(test_v3.RestfulTestCase, TokenAPITests, TokenDataTests): def test_validate_tampered_unscoped_token_fails(self): unscoped_token = self._get_unscoped_token() - tampered_token = (unscoped_token[:50] + uuid.uuid4().hex + - unscoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http.client.NOT_FOUND) + tampered_token = ( + unscoped_token[:50] + uuid.uuid4().hex + unscoped_token[50 + 32 :] + ) + self._validate_token( + tampered_token, expected_status=http.client.NOT_FOUND + ) def test_validate_tampered_project_scoped_token_fails(self): project_scoped_token = self._get_project_scoped_token() - tampered_token = (project_scoped_token[:50] + uuid.uuid4().hex + - project_scoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http.client.NOT_FOUND) + tampered_token = ( + project_scoped_token[:50] + + uuid.uuid4().hex + + project_scoped_token[50 + 32 :] + ) + self._validate_token( + tampered_token, expected_status=http.client.NOT_FOUND + ) def 
test_validate_tampered_trust_scoped_token_fails(self): trustee_user, trust = self._create_trust() trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) # Get a trust scoped token - tampered_token = (trust_scoped_token[:50] + uuid.uuid4().hex + - trust_scoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http.client.NOT_FOUND) + tampered_token = ( + trust_scoped_token[:50] + + uuid.uuid4().hex + + trust_scoped_token[50 + 32 :] + ) + self._validate_token( + tampered_token, expected_status=http.client.NOT_FOUND + ) def test_trust_scoped_token_is_invalid_after_disabling_trustor(self): # NOTE(amakarov): have to override this test for non-persistent tokens @@ -3072,8 +3310,7 @@ class TestJWSTokenAPIs(test_v3.RestfulTestCase, TokenAPITests, TokenDataTests): PROVIDERS.identity_api.update_user(self.user['id'], trustor_update_ref) # Ensure validating a token for a disabled user fails self._validate_token( - trust_scoped_token, - expected_status=http.client.FORBIDDEN + trust_scoped_token, expected_status=http.client.FORBIDDEN ) @@ -3083,14 +3320,13 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): def config_overrides(self): super(TestTokenRevokeById, self).config_overrides() self.config_fixture.config( - group='token', - provider='fernet', - revoke_by_id=False) + group='token', provider='fernet', revoke_by_id=False + ) self.useFixture( ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) @@ -3134,14 +3370,17 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): ) # Now create some users - self.user1 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainA['id']) + self.user1 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainA['id'] + ) - self.user2 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainB['id']) + self.user2 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainB['id'] 
+ ) - self.user3 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainB['id']) + self.user3 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainB['id'] + ) self.group1 = unit.new_group_ref(domain_id=self.domainA['id']) self.group1 = PROVIDERS.identity_api.create_group(self.group1) @@ -3168,44 +3407,55 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): PROVIDERS.role_api.create_role(self.role2['id'], self.role2) PROVIDERS.assignment_api.create_grant( - self.role2['id'], user_id=self.user1['id'], - domain_id=self.domainA['id'] + self.role2['id'], + user_id=self.user1['id'], + domain_id=self.domainA['id'], ) PROVIDERS.assignment_api.create_grant( - self.role1['id'], user_id=self.user1['id'], - project_id=self.projectA['id'] + self.role1['id'], + user_id=self.user1['id'], + project_id=self.projectA['id'], ) PROVIDERS.assignment_api.create_grant( - self.role1['id'], user_id=self.user2['id'], - project_id=self.projectA['id'] + self.role1['id'], + user_id=self.user2['id'], + project_id=self.projectA['id'], ) PROVIDERS.assignment_api.create_grant( - self.role1['id'], user_id=self.user3['id'], - project_id=self.projectA['id'] + self.role1['id'], + user_id=self.user3['id'], + project_id=self.projectA['id'], ) PROVIDERS.assignment_api.create_grant( - self.role1['id'], group_id=self.group1['id'], - project_id=self.projectA['id'] + self.role1['id'], + group_id=self.group1['id'], + project_id=self.projectA['id'], ) def test_unscoped_token_remains_valid_after_role_assignment(self): unscoped_token = self.get_requested_token( self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'])) + user_id=self.user1['id'], password=self.user1['password'] + ) + ) scoped_token = self.get_requested_token( self.build_authentication_request( - token=unscoped_token, - project_id=self.projectA['id'])) + token=unscoped_token, project_id=self.projectA['id'] + ) + ) # confirm both tokens are valid - self.head('/auth/tokens', - 
headers={'X-Subject-Token': unscoped_token}, - expected_status=http.client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': scoped_token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': unscoped_token}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': scoped_token}, + expected_status=http.client.OK, + ) # create a new role role = unit.new_role_ref() @@ -3213,18 +3463,25 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): # assign a new role self.put( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { + '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' + % { 'project_id': self.projectA['id'], 'user_id': self.user1['id'], - 'role_id': role['id']}) + 'role_id': role['id'], + } + ) # both tokens should remain valid - self.head('/auth/tokens', - headers={'X-Subject-Token': unscoped_token}, - expected_status=http.client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': scoped_token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': unscoped_token}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': scoped_token}, + expected_status=http.client.OK, + ) def test_deleting_user_grant_revokes_token(self): """Test deleting a user grant revokes token. 
@@ -3239,52 +3496,68 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=self.project['id']) + project_id=self.project['id'], + ) token = self.get_requested_token(auth_data) # Confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.OK, + ) # Delete the grant, which should invalidate the token grant_url = ( '/projects/%(project_id)s/users/%(user_id)s/' - 'roles/%(role_id)s' % { + 'roles/%(role_id)s' + % { 'project_id': self.project['id'], 'user_id': self.user['id'], - 'role_id': self.role['id']}) + 'role_id': self.role['id'], + } + ) self.delete(grant_url) - self.head('/auth/tokens', token=token, - expected_status=http.client.UNAUTHORIZED) + self.head( + '/auth/tokens', + token=token, + expected_status=http.client.UNAUTHORIZED, + ) def role_data_fixtures(self): self.projectC = unit.new_project_ref(domain_id=self.domainA['id']) PROVIDERS.resource_api.create_project( self.projectC['id'], self.projectC ) - self.user4 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainB['id']) - self.user5 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainA['id']) - self.user6 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domainA['id']) + self.user4 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainB['id'] + ) + self.user5 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainA['id'] + ) + self.user6 = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domainA['id'] + ) PROVIDERS.identity_api.add_user_to_group( self.user5['id'], self.group1['id'] ) PROVIDERS.assignment_api.create_grant( - self.role1['id'], group_id=self.group1['id'], - project_id=self.projectB['id'] + self.role1['id'], + group_id=self.group1['id'], + 
project_id=self.projectB['id'], ) PROVIDERS.assignment_api.create_grant( - self.role2['id'], user_id=self.user4['id'], - project_id=self.projectC['id'] + self.role2['id'], + user_id=self.user4['id'], + project_id=self.projectC['id'], ) PROVIDERS.assignment_api.create_grant( - self.role1['id'], user_id=self.user6['id'], - project_id=self.projectA['id'] + self.role1['id'], + user_id=self.user6['id'], + project_id=self.projectA['id'], ) PROVIDERS.assignment_api.create_grant( - self.role1['id'], user_id=self.user6['id'], - domain_id=self.domainA['id'] + self.role1['id'], + user_id=self.user6['id'], + domain_id=self.domainA['id'], ) def test_deleting_role_revokes_token(self): @@ -3319,67 +3592,92 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.user1['id'], password=self.user1['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) tokenA = self.get_requested_token(auth_data) auth_data = self.build_authentication_request( user_id=self.user5['id'], password=self.user5['password'], - project_id=self.projectB['id']) + project_id=self.projectB['id'], + ) tokenB = self.get_requested_token(auth_data) auth_data = self.build_authentication_request( user_id=self.user4['id'], password=self.user4['password'], - project_id=self.projectC['id']) + project_id=self.projectC['id'], + ) tokenC = self.get_requested_token(auth_data) auth_data = self.build_authentication_request( user_id=self.user6['id'], password=self.user6['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) tokenD = self.get_requested_token(auth_data) auth_data = self.build_authentication_request( user_id=self.user6['id'], password=self.user6['password'], - domain_id=self.domainA['id']) + domain_id=self.domainA['id'], + ) tokenE = self.get_requested_token(auth_data) # Confirm tokens are valid - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenA}, - expected_status=http.client.OK) - 
self.head('/auth/tokens', - headers={'X-Subject-Token': tokenB}, - expected_status=http.client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenC}, - expected_status=http.client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenD}, - expected_status=http.client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenE}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenA}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenB}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenC}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenD}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenE}, + expected_status=http.client.OK, + ) # Delete the role, which should invalidate the tokens role_url = '/roles/%s' % self.role1['id'] self.delete(role_url) # Check the tokens that used role1 is invalid - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenA}, - expected_status=http.client.NOT_FOUND) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenB}, - expected_status=http.client.NOT_FOUND) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenD}, - expected_status=http.client.NOT_FOUND) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenE}, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenA}, + expected_status=http.client.NOT_FOUND, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenB}, + expected_status=http.client.NOT_FOUND, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenD}, + expected_status=http.client.NOT_FOUND, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenE}, + expected_status=http.client.NOT_FOUND, + ) # 
...but the one using role2 is still valid - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenC}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': tokenC}, + expected_status=http.client.OK, + ) def test_domain_user_role_assignment_maintains_token(self): """Test user-domain role assignment maintains existing token. @@ -3394,78 +3692,104 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.user1['id'], password=self.user1['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) token = self.get_requested_token(auth_data) # Confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.OK, + ) # Assign a role, which should not affect the token grant_url = ( '/domains/%(domain_id)s/users/%(user_id)s/' - 'roles/%(role_id)s' % { + 'roles/%(role_id)s' + % { 'domain_id': self.domainB['id'], 'user_id': self.user1['id'], - 'role_id': self.role1['id']}) + 'role_id': self.role1['id'], + } + ) self.put(grant_url) - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.OK, + ) def test_disabling_project_revokes_token(self): token = self.get_requested_token( self.build_authentication_request( user_id=self.user3['id'], password=self.user3['password'], - project_id=self.projectA['id'])) + project_id=self.projectA['id'], + ) + ) # confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.OK, + ) # disable the project, which should invalidate the token self.patch( 
'/projects/%(project_id)s' % {'project_id': self.projectA['id']}, - body={'project': {'enabled': False}}) + body={'project': {'enabled': False}}, + ) # user should no longer have access to the project - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.NOT_FOUND, + ) self.v3_create_token( self.build_authentication_request( user_id=self.user3['id'], password=self.user3['password'], - project_id=self.projectA['id']), - expected_status=http.client.UNAUTHORIZED) + project_id=self.projectA['id'], + ), + expected_status=http.client.UNAUTHORIZED, + ) def test_deleting_project_revokes_token(self): token = self.get_requested_token( self.build_authentication_request( user_id=self.user3['id'], password=self.user3['password'], - project_id=self.projectA['id'])) + project_id=self.projectA['id'], + ) + ) # confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.OK, + ) # delete the project, which should invalidate the token self.delete( - '/projects/%(project_id)s' % {'project_id': self.projectA['id']}) + '/projects/%(project_id)s' % {'project_id': self.projectA['id']} + ) # user should no longer have access to the project - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.NOT_FOUND, + ) self.v3_create_token( self.build_authentication_request( user_id=self.user3['id'], password=self.user3['password'], - project_id=self.projectA['id']), - expected_status=http.client.UNAUTHORIZED) + project_id=self.projectA['id'], + ), + expected_status=http.client.UNAUTHORIZED, + ) def 
test_deleting_group_grant_revokes_tokens(self): """Test deleting a group grant revokes tokens. @@ -3484,54 +3808,76 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.user1['id'], password=self.user1['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) token1 = self.get_requested_token(auth_data) auth_data = self.build_authentication_request( user_id=self.user2['id'], password=self.user2['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) token2 = self.get_requested_token(auth_data) auth_data = self.build_authentication_request( user_id=self.user3['id'], password=self.user3['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) token3 = self.get_requested_token(auth_data) # Confirm tokens are valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token1}, - expected_status=http.client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - expected_status=http.client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': token3}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token1}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token2}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token3}, + expected_status=http.client.OK, + ) # Delete the group grant, which should invalidate the # tokens for user1 and user2 grant_url = ( '/projects/%(project_id)s/groups/%(group_id)s/' - 'roles/%(role_id)s' % { + 'roles/%(role_id)s' + % { 'project_id': self.projectA['id'], 'group_id': self.group1['id'], - 'role_id': self.role1['id']}) + 'role_id': self.role1['id'], + } + ) self.delete(grant_url) PROVIDERS.assignment_api.delete_grant( - role_id=self.role1['id'], project_id=self.projectA['id'], - user_id=self.user1['id'] + 
role_id=self.role1['id'], + project_id=self.projectA['id'], + user_id=self.user1['id'], ) PROVIDERS.assignment_api.delete_grant( - role_id=self.role1['id'], project_id=self.projectA['id'], - user_id=self.user2['id'] + role_id=self.role1['id'], + project_id=self.projectA['id'], + user_id=self.user2['id'], + ) + self.head( + '/auth/tokens', + token=token1, + expected_status=http.client.UNAUTHORIZED, + ) + self.head( + '/auth/tokens', + token=token2, + expected_status=http.client.UNAUTHORIZED, ) - self.head('/auth/tokens', token=token1, - expected_status=http.client.UNAUTHORIZED) - self.head('/auth/tokens', token=token2, - expected_status=http.client.UNAUTHORIZED) # But user3's token should be invalid too as revocation is done for # scope role & project - self.head('/auth/tokens', - headers={'X-Subject-Token': token3}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token3}, + expected_status=http.client.OK, + ) def test_domain_group_role_assignment_maintains_token(self): """Test domain-group role assignment maintains existing token. 
@@ -3546,23 +3892,31 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.user1['id'], password=self.user1['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) token = self.get_requested_token(auth_data) # Confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.OK, + ) # Delete the grant, which should invalidate the token grant_url = ( '/domains/%(domain_id)s/groups/%(group_id)s/' - 'roles/%(role_id)s' % { + 'roles/%(role_id)s' + % { 'domain_id': self.domainB['id'], 'group_id': self.group1['id'], - 'role_id': self.role1['id']}) + 'role_id': self.role1['id'], + } + ) self.put(grant_url) - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token}, + expected_status=http.client.OK, + ) def test_group_membership_changes_revokes_token(self): """Test add/removal to/from group revokes token. 
@@ -3582,39 +3936,53 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.user1['id'], password=self.user1['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) token1 = self.get_requested_token(auth_data) auth_data = self.build_authentication_request( user_id=self.user2['id'], password=self.user2['password'], - project_id=self.projectA['id']) + project_id=self.projectA['id'], + ) token2 = self.get_requested_token(auth_data) # Confirm tokens are valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token1}, - expected_status=http.client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token1}, + expected_status=http.client.OK, + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token2}, + expected_status=http.client.OK, + ) # Remove user1 from group1, which should invalidate # the token - self.delete('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group1['id'], - 'user_id': self.user1['id']}) - self.head('/auth/tokens', - headers={'X-Subject-Token': token1}, - expected_status=http.client.NOT_FOUND) + self.delete( + '/groups/%(group_id)s/users/%(user_id)s' + % {'group_id': self.group1['id'], 'user_id': self.user1['id']} + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token1}, + expected_status=http.client.NOT_FOUND, + ) # But user2's token should still be valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token2}, + expected_status=http.client.OK, + ) # Adding user2 to a group should not invalidate token - self.put('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group2['id'], - 'user_id': self.user2['id']}) - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - 
expected_status=http.client.OK) + self.put( + '/groups/%(group_id)s/users/%(user_id)s' + % {'group_id': self.group2['id'], 'user_id': self.user2['id']} + ) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': token2}, + expected_status=http.client.OK, + ) def test_removing_role_assignment_does_not_affect_other_users(self): """Revoking a role from one user should not affect other users.""" @@ -3622,10 +3990,13 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): with freezegun.freeze_time(time) as frozen_datetime: # This group grant is not needed for the test self.delete( - '/projects/%(p_id)s/groups/%(g_id)s/roles/%(r_id)s' % - {'p_id': self.projectA['id'], - 'g_id': self.group1['id'], - 'r_id': self.role1['id']}) + '/projects/%(p_id)s/groups/%(g_id)s/roles/%(r_id)s' + % { + 'p_id': self.projectA['id'], + 'g_id': self.group1['id'], + 'r_id': self.role1['id'], + } + ) # NOTE(lbragstad): Here we advance the clock one second to pass # into the threshold of a new second because we just persisted a @@ -3640,55 +4011,74 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): self.build_authentication_request( user_id=self.user1['id'], password=self.user1['password'], - project_id=self.projectA['id'])) + project_id=self.projectA['id'], + ) + ) user3_token = self.get_requested_token( self.build_authentication_request( user_id=self.user3['id'], password=self.user3['password'], - project_id=self.projectA['id'])) + project_id=self.projectA['id'], + ) + ) # delete relationships between user1 and projectA from setUp self.delete( - '/projects/%(p_id)s/users/%(u_id)s/roles/%(r_id)s' % { + '/projects/%(p_id)s/users/%(u_id)s/roles/%(r_id)s' + % { 'p_id': self.projectA['id'], 'u_id': self.user1['id'], - 'r_id': self.role1['id']}) + 'r_id': self.role1['id'], + } + ) # authorization for the first user should now fail - self.head('/auth/tokens', - headers={'X-Subject-Token': user1_token}, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + 
headers={'X-Subject-Token': user1_token}, + expected_status=http.client.NOT_FOUND, + ) self.v3_create_token( self.build_authentication_request( user_id=self.user1['id'], password=self.user1['password'], - project_id=self.projectA['id']), - expected_status=http.client.UNAUTHORIZED) + project_id=self.projectA['id'], + ), + expected_status=http.client.UNAUTHORIZED, + ) # authorization for the second user should still succeed - self.head('/auth/tokens', - headers={'X-Subject-Token': user3_token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': user3_token}, + expected_status=http.client.OK, + ) self.v3_create_token( self.build_authentication_request( user_id=self.user3['id'], password=self.user3['password'], - project_id=self.projectA['id'])) + project_id=self.projectA['id'], + ) + ) def test_deleting_project_deletes_grants(self): # This is to make it a little bit more pretty with PEP8 - role_path = ('/projects/%(project_id)s/users/%(user_id)s/' - 'roles/%(role_id)s') - role_path = role_path % {'user_id': self.user['id'], - 'project_id': self.projectA['id'], - 'role_id': self.role['id']} + role_path = ( + '/projects/%(project_id)s/users/%(user_id)s/' 'roles/%(role_id)s' + ) + role_path = role_path % { + 'user_id': self.user['id'], + 'project_id': self.projectA['id'], + 'role_id': self.role['id'], + } # grant the user a role on the project self.put(role_path) # delete the project, which should remove the roles self.delete( - '/projects/%(project_id)s' % {'project_id': self.projectA['id']}) + '/projects/%(project_id)s' % {'project_id': self.projectA['id']} + ) # Make sure that we get a 404 Not Found when heading that role. 
self.head(role_path, expected_status=http.client.NOT_FOUND) @@ -3700,53 +4090,68 @@ class TestTokenRevokeById(test_v3.RestfulTestCase): unscoped_token = self.get_requested_token( self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'])) + user_id=self.user1['id'], password=self.user1['password'] + ) + ) # Get a project-scoped token from the unscoped token project_scoped_token = self.get_requested_token( self.build_authentication_request( - token=unscoped_token, - project_id=self.projectA['id'])) + token=unscoped_token, project_id=self.projectA['id'] + ) + ) # Get a domain-scoped token from the unscoped token domain_scoped_token = self.get_requested_token( self.build_authentication_request( - token=unscoped_token, - domain_id=self.domainA['id'])) + token=unscoped_token, domain_id=self.domainA['id'] + ) + ) # revoke the project-scoped token. - self.delete('/auth/tokens', - headers={'X-Subject-Token': project_scoped_token}) + self.delete( + '/auth/tokens', headers={'X-Subject-Token': project_scoped_token} + ) # The project-scoped token is invalidated. - self.head('/auth/tokens', - headers={'X-Subject-Token': project_scoped_token}, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': project_scoped_token}, + expected_status=http.client.NOT_FOUND, + ) # The unscoped token should still be valid. - self.head('/auth/tokens', - headers={'X-Subject-Token': unscoped_token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': unscoped_token}, + expected_status=http.client.OK, + ) # The domain-scoped token should still be valid. - self.head('/auth/tokens', - headers={'X-Subject-Token': domain_scoped_token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': domain_scoped_token}, + expected_status=http.client.OK, + ) # revoke the domain-scoped token. 
- self.delete('/auth/tokens', - headers={'X-Subject-Token': domain_scoped_token}) + self.delete( + '/auth/tokens', headers={'X-Subject-Token': domain_scoped_token} + ) # The domain-scoped token is invalid. - self.head('/auth/tokens', - headers={'X-Subject-Token': domain_scoped_token}, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': domain_scoped_token}, + expected_status=http.client.NOT_FOUND, + ) # The unscoped token should still be valid. - self.head('/auth/tokens', - headers={'X-Subject-Token': unscoped_token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'X-Subject-Token': unscoped_token}, + expected_status=http.client.OK, + ) class TestTokenRevokeApi(TestTokenRevokeById): @@ -3755,14 +4160,13 @@ class TestTokenRevokeApi(TestTokenRevokeById): def config_overrides(self): super(TestTokenRevokeApi, self).config_overrides() self.config_fixture.config( - group='token', - provider='fernet', - revoke_by_id=False) + group='token', provider='fernet', revoke_by_id=False + ) self.useFixture( ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) @@ -3772,9 +4176,9 @@ class TestTokenRevokeApi(TestTokenRevokeById): self.assertEqual(project_id, events[0]['project_id']) self.assertIsNotNone(events[0]['issued_before']) self.assertIsNotNone(events_response['links']) - del (events_response['events'][0]['issued_before']) - del (events_response['events'][0]['revoked_at']) - del (events_response['links']) + del events_response['events'][0]['issued_before'] + del events_response['events'][0]['revoked_at'] + del events_response['links'] expected_response = {'events': [{'project_id': project_id}]} self.assertEqual(expected_response, events_response) @@ -3785,9 +4189,9 @@ class TestTokenRevokeApi(TestTokenRevokeById): self.assertEqual(v, events[0].get(k)) self.assertIsNotNone(events[0]['issued_before']) 
self.assertIsNotNone(events_response['links']) - del (events_response['events'][0]['issued_before']) - del (events_response['events'][0]['revoked_at']) - del (events_response['links']) + del events_response['events'][0]['issued_before'] + del events_response['events'][0]['revoked_at'] + del events_response['links'] expected_response = {'events': [kwargs]} self.assertEqual(expected_response, events_response) @@ -3798,31 +4202,35 @@ class TestTokenRevokeApi(TestTokenRevokeById): response = self.get('/auth/tokens', headers=headers).json_body['token'] self.delete('/auth/tokens', headers=headers) - self.head('/auth/tokens', headers=headers, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers=headers, + expected_status=http.client.NOT_FOUND, + ) events_response = self.get('/OS-REVOKE/events').json_body - self.assertValidRevokedTokenResponse(events_response, - audit_id=response['audit_ids'][0]) + self.assertValidRevokedTokenResponse( + events_response, audit_id=response['audit_ids'][0] + ) def test_get_revoke_by_id_false_returns_gone(self): - self.get('/auth/tokens/OS-PKI/revoked', - expected_status=http.client.GONE) + self.get( + '/auth/tokens/OS-PKI/revoked', expected_status=http.client.GONE + ) def test_head_revoke_by_id_false_returns_gone(self): - self.head('/auth/tokens/OS-PKI/revoked', - expected_status=http.client.GONE) + self.head( + '/auth/tokens/OS-PKI/revoked', expected_status=http.client.GONE + ) def test_revoke_by_id_true_returns_forbidden(self): - self.config_fixture.config( - group='token', - revoke_by_id=True) + self.config_fixture.config(group='token', revoke_by_id=True) self.get( '/auth/tokens/OS-PKI/revoked', - expected_status=http.client.FORBIDDEN + expected_status=http.client.FORBIDDEN, ) self.head( '/auth/tokens/OS-PKI/revoked', - expected_status=http.client.FORBIDDEN + expected_status=http.client.FORBIDDEN, ) def test_list_delete_project_shows_in_event_list(self): @@ -3830,11 +4238,13 @@ class 
TestTokenRevokeApi(TestTokenRevokeById): events = self.get('/OS-REVOKE/events').json_body['events'] self.assertEqual([], events) self.delete( - '/projects/%(project_id)s' % {'project_id': self.projectA['id']}) + '/projects/%(project_id)s' % {'project_id': self.projectA['id']} + ) events_response = self.get('/OS-REVOKE/events').json_body - self.assertValidDeletedProjectResponse(events_response, - self.projectA['id']) + self.assertValidDeletedProjectResponse( + events_response, self.projectA['id'] + ) def assertEventDataInList(self, events, **kwargs): found = False @@ -3852,13 +4262,18 @@ class TestTokenRevokeApi(TestTokenRevokeById): # for each item in kwargs, the event was fully matched and # the assertTrue below should succeed. found = True - self.assertTrue(found, - 'event with correct values not in list, expected to ' - 'find event with key-value pairs. Expected: ' - '"%(expected)s" Events: "%(events)s"' % - {'expected': ','.join( - ["'%s=%s'" % (k, v) for k, v in kwargs.items()]), - 'events': events}) + self.assertTrue( + found, + 'event with correct values not in list, expected to ' + 'find event with key-value pairs. 
Expected: ' + '"%(expected)s" Events: "%(events)s"' + % { + 'expected': ','.join( + ["'%s=%s'" % (k, v) for k, v in kwargs.items()] + ), + 'events': events, + }, + ) def test_list_delete_token_shows_in_event_list(self): self.role_data_fixtures() @@ -3876,12 +4291,15 @@ class TestTokenRevokeApi(TestTokenRevokeById): response.json_body['token'] headers3 = {'X-Subject-Token': response.headers['X-Subject-Token']} - self.head('/auth/tokens', headers=headers, - expected_status=http.client.OK) - self.head('/auth/tokens', headers=headers2, - expected_status=http.client.OK) - self.head('/auth/tokens', headers=headers3, - expected_status=http.client.OK) + self.head( + '/auth/tokens', headers=headers, expected_status=http.client.OK + ) + self.head( + '/auth/tokens', headers=headers2, expected_status=http.client.OK + ) + self.head( + '/auth/tokens', headers=headers3, expected_status=http.client.OK + ) self.delete('/auth/tokens', headers=headers) # NOTE(ayoung): not deleting token3, as it should be deleted @@ -3889,15 +4307,18 @@ class TestTokenRevokeApi(TestTokenRevokeById): events_response = self.get('/OS-REVOKE/events').json_body events = events_response['events'] self.assertEqual(1, len(events)) - self.assertEventDataInList( - events, - audit_id=token2['audit_ids'][1]) - self.head('/auth/tokens', headers=headers, - expected_status=http.client.NOT_FOUND) - self.head('/auth/tokens', headers=headers2, - expected_status=http.client.OK) - self.head('/auth/tokens', headers=headers3, - expected_status=http.client.OK) + self.assertEventDataInList(events, audit_id=token2['audit_ids'][1]) + self.head( + '/auth/tokens', + headers=headers, + expected_status=http.client.NOT_FOUND, + ) + self.head( + '/auth/tokens', headers=headers2, expected_status=http.client.OK + ) + self.head( + '/auth/tokens', headers=headers3, expected_status=http.client.OK + ) def test_list_with_filter(self): @@ -3915,30 +4336,35 @@ class TestTokenRevokeApi(TestTokenRevokeById): events = 
self.get('/OS-REVOKE/events').json_body['events'] self.assertEqual(2, len(events)) - future = utils.isotime(timeutils.utcnow() + - datetime.timedelta(seconds=1000)) + future = utils.isotime( + timeutils.utcnow() + datetime.timedelta(seconds=1000) + ) - events = self.get('/OS-REVOKE/events?since=%s' % (future) - ).json_body['events'] + events = self.get('/OS-REVOKE/events?since=%s' % (future)).json_body[ + 'events' + ] self.assertEqual(0, len(events)) class TestAuthExternalDisabled(test_v3.RestfulTestCase): def config_overrides(self): super(TestAuthExternalDisabled, self).config_overrides() - self.config_fixture.config( - group='auth', - methods=['password', 'token']) + self.config_fixture.config(group='auth', methods=['password', 'token']) def test_remote_user_disabled(self): app = self.loadapp() remote_user = '%s@%s' % (self.user['name'], self.domain['name']) with app.test_client() as c: - c.environ_base.update(self.build_external_auth_environ( - remote_user)) + c.environ_base.update( + self.build_external_auth_environ(remote_user) + ) auth_data = self.build_authentication_request() - c.post('/v3/auth/tokens', json=auth_data, - expected_status_code=http.client.UNAUTHORIZED) + c.post( + '/v3/auth/tokens', + json=auth_data, + expected_status_code=http.client.UNAUTHORIZED, + ) + # FIXME(morgan): This test case must be re-worked to function under flask. 
It # has been commented out until it is re-worked ensuring no issues when webob @@ -4016,11 +4442,17 @@ class TestAuthJSONExternal(test_v3.RestfulTestCase): def test_remote_user_no_method(self): app = self.loadapp() with app.test_client() as c: - c.environ_base.update(self.build_external_auth_environ( - self.default_domain_user['name'])) + c.environ_base.update( + self.build_external_auth_environ( + self.default_domain_user['name'] + ) + ) auth_data = self.build_authentication_request() - c.post('/v3/auth/tokens', json=auth_data, - expected_status_code=http.client.UNAUTHORIZED) + c.post( + '/v3/auth/tokens', + json=auth_data, + expected_status_code=http.client.UNAUTHORIZED, + ) class TrustAPIBehavior(test_v3.RestfulTestCase): @@ -4052,16 +4484,15 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): def config_overrides(self): super(TrustAPIBehavior, self).config_overrides() self.config_fixture.config( - group='trust', - allow_redelegation=True, - max_redelegation_count=10 + group='trust', allow_redelegation=True, max_redelegation_count=10 ) def setUp(self): super(TrustAPIBehavior, self).setUp() # Create a trustee to delegate stuff to - self.trustee_user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain_id) + self.trustee_user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain_id + ) # trustor->trustee self.redelegated_trust_ref = unit.new_trust_ref( @@ -4071,7 +4502,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): impersonation=True, expires=dict(minutes=1), role_ids=[self.role_id], - allow_redelegation=True) + allow_redelegation=True, + ) # trustor->trustee (no redelegation) self.chained_trust_ref = unit.new_trust_ref( @@ -4080,33 +4512,39 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, role_ids=[self.role_id], - allow_redelegation=True) + allow_redelegation=True, + ) def _get_trust_token(self, trust): trust_id = trust['id'] auth_data = self.build_authentication_request( 
user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust_id) + trust_id=trust_id, + ) trust_token = self.get_requested_token(auth_data) return trust_token def test_depleted_redelegation_count_error(self): self.redelegated_trust_ref['redelegation_count'] = 0 - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) + r = self.post( + '/OS-TRUST/trusts', body={'trust': self.redelegated_trust_ref} + ) trust = self.assertValidTrustResponse(r) trust_token = self._get_trust_token(trust) # Attempt to create a redelegated trust. - self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token, - expected_status=http.client.FORBIDDEN) + self.post( + '/OS-TRUST/trusts', + body={'trust': self.chained_trust_ref}, + token=trust_token, + expected_status=http.client.FORBIDDEN, + ) def test_modified_redelegation_count_error(self): - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) + r = self.post( + '/OS-TRUST/trusts', body={'trust': self.redelegated_trust_ref} + ) trust = self.assertValidTrustResponse(r) trust_token = self._get_trust_token(trust) @@ -4115,21 +4553,26 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): correct = trust['redelegation_count'] - 1 incorrect = correct - 1 self.chained_trust_ref['redelegation_count'] = incorrect - self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token, - expected_status=http.client.FORBIDDEN) + self.post( + '/OS-TRUST/trusts', + body={'trust': self.chained_trust_ref}, + token=trust_token, + expected_status=http.client.FORBIDDEN, + ) def test_max_redelegation_count_constraint(self): incorrect = CONF.trust.max_redelegation_count + 1 self.redelegated_trust_ref['redelegation_count'] = incorrect - self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}, - expected_status=http.client.FORBIDDEN) + self.post( + '/OS-TRUST/trusts', + body={'trust': 
self.redelegated_trust_ref}, + expected_status=http.client.FORBIDDEN, + ) def test_redelegation_expiry(self): - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) + r = self.post( + '/OS-TRUST/trusts', body={'trust': self.redelegated_trust_ref} + ) trust = self.assertValidTrustResponse(r) trust_token = self._get_trust_token(trust) @@ -4141,15 +4584,19 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, expires=dict(minutes=10), - role_ids=[self.role_id]) - self.post('/OS-TRUST/trusts', - body={'trust': too_long_live_chained_trust_ref}, - token=trust_token, - expected_status=http.client.FORBIDDEN) + role_ids=[self.role_id], + ) + self.post( + '/OS-TRUST/trusts', + body={'trust': too_long_live_chained_trust_ref}, + token=trust_token, + expected_status=http.client.FORBIDDEN, + ) def test_redelegation_remaining_uses(self): - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) + r = self.post( + '/OS-TRUST/trusts', body={'trust': self.redelegated_trust_ref} + ) trust = self.assertValidTrustResponse(r) trust_token = self._get_trust_token(trust) @@ -4157,10 +4604,12 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): # It must fail according to specification: remaining_uses must be # omitted for trust redelegation. Any number here. 
self.chained_trust_ref['remaining_uses'] = 5 - self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token, - expected_status=http.client.BAD_REQUEST) + self.post( + '/OS-TRUST/trusts', + body={'trust': self.chained_trust_ref}, + token=trust_token, + expected_status=http.client.BAD_REQUEST, + ) def test_roles_subset(self): # Build second role @@ -4168,17 +4617,20 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): PROVIDERS.role_api.create_role(role['id'], role) # assign a new role to the user PROVIDERS.assignment_api.create_grant( - role_id=role['id'], user_id=self.user_id, - project_id=self.project_id + role_id=role['id'], + user_id=self.user_id, + project_id=self.project_id, ) # Create first trust with extended set of roles ref = self.redelegated_trust_ref - ref['expires_at'] = datetime.datetime.utcnow().replace( - year=2032).strftime(unit.TIME_FORMAT) + ref['expires_at'] = ( + datetime.datetime.utcnow() + .replace(year=2032) + .strftime(unit.TIME_FORMAT) + ) ref['roles'].append({'id': role['id']}) - r = self.post('/OS-TRUST/trusts', - body={'trust': ref}) + r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r) # Trust created with exact set of roles (checked by role id) role_id_set = set(r['id'] for r in ref['roles']) @@ -4189,11 +4641,15 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): # Chain second trust with roles subset self.chained_trust_ref['expires_at'] = ( - datetime.datetime.utcnow().replace(year=2028).strftime( - unit.TIME_FORMAT)) - r = self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token) + datetime.datetime.utcnow() + .replace(year=2028) + .strftime(unit.TIME_FORMAT) + ) + r = self.post( + '/OS-TRUST/trusts', + body={'trust': self.chained_trust_ref}, + token=trust_token, + ) trust2 = self.assertValidTrustResponse(r) # First trust contains roles superset # Second trust contains roles subset @@ -4213,15 +4669,15 @@ class 
TrustAPIBehavior(test_v3.RestfulTestCase): # Assign new roles to the user (with role2 implied) PROVIDERS.assignment_api.create_grant( - role_id=role1['id'], user_id=self.user_id, - project_id=self.project_id + role_id=role1['id'], + user_id=self.user_id, + project_id=self.project_id, ) # Create trust ref = self.redelegated_trust_ref ref['roles'] = [{'id': role1['id']}, {'id': role2['id']}] - resp = self.post('/OS-TRUST/trusts', - body={'trust': ref}) + resp = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(resp) # Trust created with exact set of roles (checked by role id) @@ -4234,7 +4690,7 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id'] + trust_id=trust['id'], ) resp = self.post('/auth/tokens', body=auth_data) trust_token_role_ids = [r['id'] for r in resp.json['token']['roles']] @@ -4250,11 +4706,14 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): impersonation=True, expires=dict(minutes=1), role_names=[self.role['name']], - allow_redelegation=True) - ref['expires_at'] = datetime.datetime.utcnow().replace( - year=2032).strftime(unit.TIME_FORMAT) - r = self.post('/OS-TRUST/trusts', - body={'trust': ref}) + allow_redelegation=True, + ) + ref['expires_at'] = ( + datetime.datetime.utcnow() + .replace(year=2032) + .strftime(unit.TIME_FORMAT) + ) + r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r) # Ensure we can get a token with this trust trust_token = self._get_trust_token(trust) @@ -4265,19 +4724,24 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, role_names=[self.role['name']], - allow_redelegation=True) - ref['expires_at'] = datetime.datetime.utcnow().replace( - year=2028).strftime(unit.TIME_FORMAT) - r = self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=trust_token) + 
allow_redelegation=True, + ) + ref['expires_at'] = ( + datetime.datetime.utcnow() + .replace(year=2028) + .strftime(unit.TIME_FORMAT) + ) + r = self.post( + '/OS-TRUST/trusts', body={'trust': ref}, token=trust_token + ) trust = self.assertValidTrustResponse(r) # Ensure we can get a token with this trust self._get_trust_token(trust) def test_redelegate_new_role_fails(self): - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) + r = self.post( + '/OS-TRUST/trusts', body={'trust': self.redelegated_trust_ref} + ) trust = self.assertValidTrustResponse(r) trust_token = self._get_trust_token(trust) @@ -4286,8 +4750,9 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): PROVIDERS.role_api.create_role(role['id'], role) # assign a new role to the user PROVIDERS.assignment_api.create_grant( - role_id=role['id'], user_id=self.user_id, - project_id=self.project_id + role_id=role['id'], + user_id=self.user_id, + project_id=self.project_id, ) # Try to chain a trust with the role not from parent trust @@ -4295,31 +4760,40 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): # Bypass policy enforcement with mock.patch.object(policy, 'enforce', return_value=True): - self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token, - expected_status=http.client.FORBIDDEN) + self.post( + '/OS-TRUST/trusts', + body={'trust': self.chained_trust_ref}, + token=trust_token, + expected_status=http.client.FORBIDDEN, + ) def test_redelegation_terminator(self): self.redelegated_trust_ref['expires_at'] = ( - datetime.datetime.utcnow().replace(year=2032).strftime( - unit.TIME_FORMAT)) - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) + datetime.datetime.utcnow() + .replace(year=2032) + .strftime(unit.TIME_FORMAT) + ) + r = self.post( + '/OS-TRUST/trusts', body={'trust': self.redelegated_trust_ref} + ) trust = self.assertValidTrustResponse(r) trust_token = self._get_trust_token(trust) # Build second trust - the 
terminator self.chained_trust_ref['expires_at'] = ( - datetime.datetime.utcnow().replace(year=2028).strftime( - unit.TIME_FORMAT)) - ref = dict(self.chained_trust_ref, - redelegation_count=1, - allow_redelegation=False) + datetime.datetime.utcnow() + .replace(year=2028) + .strftime(unit.TIME_FORMAT) + ) + ref = dict( + self.chained_trust_ref, + redelegation_count=1, + allow_redelegation=False, + ) - r = self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=trust_token) + r = self.post( + '/OS-TRUST/trusts', body={'trust': ref}, token=trust_token + ) trust = self.assertValidTrustResponse(r) # Check that allow_redelegation == False caused redelegation_count @@ -4329,31 +4803,37 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): trust_token = self._get_trust_token(trust) # Build third trust, same as second - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=trust_token, - expected_status=http.client.FORBIDDEN) + self.post( + '/OS-TRUST/trusts', + body={'trust': ref}, + token=trust_token, + expected_status=http.client.FORBIDDEN, + ) def test_redelegation_without_impersonation(self): # Update trust to not allow impersonation self.redelegated_trust_ref['impersonation'] = False # Create trust - resp = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}, - expected_status=http.client.CREATED) + resp = self.post( + '/OS-TRUST/trusts', + body={'trust': self.redelegated_trust_ref}, + expected_status=http.client.CREATED, + ) trust = self.assertValidTrustResponse(resp) # Get trusted token without impersonation auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) trust_token = self.get_requested_token(auth_data) # Create second user for redelegation - trustee_user_2 = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain_id) + trustee_user_2 = unit.create_user( + PROVIDERS.identity_api, 
domain_id=self.domain_id + ) # Trust for redelegation trust_ref_2 = unit.new_trust_ref( @@ -4363,19 +4843,23 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): impersonation=False, expires=dict(minutes=1), role_ids=[self.role_id], - allow_redelegation=False) + allow_redelegation=False, + ) # Creating a second trust should not be allowed since trustor does not # have the role to delegate thus returning 404 NOT FOUND. - resp = self.post('/OS-TRUST/trusts', - body={'trust': trust_ref_2}, - token=trust_token, - expected_status=http.client.NOT_FOUND) + resp = self.post( + '/OS-TRUST/trusts', + body={'trust': trust_ref_2}, + token=trust_token, + expected_status=http.client.NOT_FOUND, + ) def test_create_unscoped_trust(self): ref = unit.new_trust_ref( trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id']) + trustee_user_id=self.trustee_user['id'], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) self.assertValidTrustResponse(r, ref) @@ -4383,9 +4867,13 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): ref = unit.new_trust_ref( trustor_user_id=self.user_id, trustee_user_id=self.trustee_user['id'], - project_id=self.project_id) - self.post('/OS-TRUST/trusts', body={'trust': ref}, - expected_status=http.client.FORBIDDEN) + project_id=self.project_id, + ) + self.post( + '/OS-TRUST/trusts', + body={'trust': ref}, + expected_status=http.client.FORBIDDEN, + ) def _initialize_test_consume_trust(self, count): # Make sure remaining_uses is decremented as we consume the trust @@ -4394,22 +4882,25 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): trustee_user_id=self.trustee_user['id'], project_id=self.project_id, remaining_uses=count, - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) # make sure the trust exists trust = self.assertValidTrustResponse(r, ref) r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) + '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': 
trust['id']} + ) # get a token for the trustee auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], - password=self.trustee_user['password']) + password=self.trustee_user['password'], + ) r = self.v3_create_token(auth_data) token = r.headers.get('X-Subject-Token') # get a trust token, consume one use auth_data = self.build_authentication_request( - token=token, - trust_id=trust['id']) + token=token, trust_id=trust['id'] + ) r = self.v3_create_token(auth_data) return trust @@ -4418,33 +4909,33 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): token = self.v3_create_token( self.build_authentication_request( user_id=self.trustee_user['id'], - password=self.trustee_user['password'] + password=self.trustee_user['password'], ) ).headers.get('X-Subject-Token') auth_data = { 'auth': { - 'identity': { - 'methods': ['token'], - 'token': {'id': token} - }, + 'identity': {'methods': ['token'], 'token': {'id': token}}, # We don't need a trust to execute this test, the # OS-TRUST:trust key of the request body just has to be a # string instead of a dictionary in order to throw a 500 when # it should a 400 Bad Request. 
- 'scope': {'OS-TRUST:trust': ''} + 'scope': {'OS-TRUST:trust': ''}, } } self.admin_request( - method='POST', path='/v3/auth/tokens', body=auth_data, - expected_status=http.client.BAD_REQUEST + method='POST', + path='/v3/auth/tokens', + body=auth_data, + expected_status=http.client.BAD_REQUEST, ) def test_consume_trust_once(self): trust = self._initialize_test_consume_trust(2) # check decremented value r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) + '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']} + ) trust = r.result.get('trust') self.assertIsNotNone(trust) self.assertEqual(1, trust['remaining_uses']) @@ -4456,14 +4947,17 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): # No more uses, the trust is made unavailable self.get( '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}, - expected_status=http.client.NOT_FOUND) + expected_status=http.client.NOT_FOUND, + ) # this time we can't get a trust token auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + trust_id=trust['id'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_create_unlimited_use_trust(self): # by default trusts are unlimited in terms of tokens that can be @@ -4473,23 +4967,27 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): trustee_user_id=self.trustee_user['id'], project_id=self.project_id, remaining_uses=None, - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r, ref) r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) + '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']} + ) auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], - password=self.trustee_user['password']) + 
password=self.trustee_user['password'], + ) r = self.v3_create_token(auth_data) token = r.headers.get('X-Subject-Token') auth_data = self.build_authentication_request( - token=token, - trust_id=trust['id']) + token=token, trust_id=trust['id'] + ) r = self.v3_create_token(auth_data) r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) + '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']} + ) trust = r.result.get('trust') self.assertIsNone(trust['remaining_uses']) @@ -4500,7 +4998,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r) @@ -4508,7 +5007,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) trust_token = self.get_requested_token(auth_data) @@ -4519,12 +5019,15 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=trust_token, - expected_status=http.client.FORBIDDEN) + self.post( + '/OS-TRUST/trusts', + body={'trust': ref}, + token=trust_token, + expected_status=http.client.FORBIDDEN, + ) def test_trust_deleted_grant(self): # create a new role @@ -4533,10 +5036,13 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): grant_url = ( '/projects/%(project_id)s/users/%(user_id)s/' - 'roles/%(role_id)s' % { + 'roles/%(role_id)s' + % { 'project_id': self.project_id, 'user_id': self.user_id, - 'role_id': role['id']}) + 'role_id': role['id'], + } + ) # assign a new role self.put(grant_url) @@ -4548,7 +5054,8 @@ class 
TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=False, expires=dict(minutes=1), - role_ids=[role['id']]) + role_ids=[role['id']], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r) @@ -4561,9 +5068,11 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data, - expected_status=http.client.FORBIDDEN) + trust_id=trust['id'], + ) + r = self.v3_create_token( + auth_data, expected_status=http.client.FORBIDDEN + ) def test_trust_chained(self): """Test that a trust token can't be used to execute another trust. @@ -4574,8 +5083,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): """ # create a sub-trustee user sub_trustee_user = unit.create_user( - PROVIDERS.identity_api, - domain_id=test_v3.DEFAULT_DOMAIN_ID) + PROVIDERS.identity_api, domain_id=test_v3.DEFAULT_DOMAIN_ID + ) sub_trustee_user_id = sub_trustee_user['id'] # create a new role @@ -4584,10 +5093,13 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): # assign the new role to trustee self.put( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { + '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' + % { 'project_id': self.project_id, 'user_id': self.trustee_user['id'], - 'role_id': role['id']}) + 'role_id': role['id'], + } + ) # create a trust from trustor -> trustee ref = unit.new_trust_ref( @@ -4596,7 +5108,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust1 = self.assertValidTrustResponse(r) @@ -4604,7 +5117,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( 
user_id=self.trustee_user['id'], password=self.trustee_user['password'], - project_id=self.project_id) + project_id=self.project_id, + ) token = self.get_requested_token(auth_data) # create a trust from trustee -> sub-trustee @@ -4614,7 +5128,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, expires=dict(minutes=1), - role_ids=[role['id']]) + role_ids=[role['id']], + ) r = self.post('/OS-TRUST/trusts', token=token, body={'trust': ref}) trust2 = self.assertValidTrustResponse(r) @@ -4622,15 +5137,17 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=sub_trustee_user['id'], password=sub_trustee_user['password'], - trust_id=trust2['id']) + trust_id=trust2['id'], + ) trust_token = self.get_requested_token(auth_data) # attempt to get the second trust using a trust token auth_data = self.build_authentication_request( - token=trust_token, - trust_id=trust1['id']) - r = self.v3_create_token(auth_data, - expected_status=http.client.FORBIDDEN) + token=trust_token, trust_id=trust1['id'] + ) + r = self.v3_create_token( + auth_data, expected_status=http.client.FORBIDDEN + ) def assertTrustTokensRevoked(self, trust_id): revocation_response = self.get('/OS-REVOKE/events') @@ -4639,8 +5156,9 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): for event in revocation_events: if event.get('OS-TRUST:trust_id') == trust_id: found = True - self.assertTrue(found, 'event with trust_id %s not found in list' % - trust_id) + self.assertTrue( + found, 'event with trust_id %s not found in list' % trust_id + ) def test_delete_trust_revokes_tokens(self): ref = unit.new_trust_ref( @@ -4649,23 +5167,26 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=False, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r) trust_id = 
trust['id'] auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust_id) + trust_id=trust_id, + ) r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse( - r, self.trustee_user) + self.assertValidProjectScopedTokenResponse(r, self.trustee_user) trust_token = r.headers['X-Subject-Token'] - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': trust_id}) + self.delete('/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust_id}) headers = {'X-Subject-Token': trust_token} - self.head('/auth/tokens', headers=headers, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers=headers, + expected_status=http.client.NOT_FOUND, + ) self.assertTrustTokensRevoked(trust_id) def disable_user(self, user): @@ -4679,7 +5200,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=False, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) @@ -4688,7 +5210,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) self.v3_create_token(auth_data) self.disable_user(self.user) @@ -4696,9 +5219,9 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data, - expected_status=http.client.FORBIDDEN) + trust_id=trust['id'], + ) + self.v3_create_token(auth_data, expected_status=http.client.FORBIDDEN) def test_trust_get_token_fails_if_trustee_disabled(self): ref = unit.new_trust_ref( @@ -4707,7 +5230,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=False, 
expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) @@ -4716,7 +5240,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) self.v3_create_token(auth_data) self.disable_user(self.trustee_user) @@ -4724,9 +5249,11 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + trust_id=trust['id'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_delete_trust(self): ref = unit.new_trust_ref( @@ -4735,21 +5262,25 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=False, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r, ref) - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': trust['id']}) + self.delete( + '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']} + ) auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + trust_id=trust['id'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_change_password_invalidates_trust_tokens(self): ref = unit.new_trust_ref( @@ -4758,7 +5289,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) r = 
self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r) @@ -4766,47 +5298,56 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) r = self.v3_create_token(auth_data) self.assertValidProjectScopedTokenResponse(r, self.user) trust_token = r.headers.get('X-Subject-Token') - self.get('/OS-TRUST/trusts?trustor_user_id=%s' % - self.user_id, token=trust_token) + self.get( + '/OS-TRUST/trusts?trustor_user_id=%s' % self.user_id, + token=trust_token, + ) self.assertValidUserResponse( - self.patch('/users/%s' % self.trustee_user['id'], - body={'user': {'password': uuid.uuid4().hex}})) + self.patch( + '/users/%s' % self.trustee_user['id'], + body={'user': {'password': uuid.uuid4().hex}}, + ) + ) - self.get('/OS-TRUST/trusts?trustor_user_id=%s' % - self.user_id, expected_status=http.client.UNAUTHORIZED, - token=trust_token) + self.get( + '/OS-TRUST/trusts?trustor_user_id=%s' % self.user_id, + expected_status=http.client.UNAUTHORIZED, + token=trust_token, + ) def test_trustee_can_do_role_ops(self): - resp = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) + resp = self.post( + '/OS-TRUST/trusts', body={'trust': self.redelegated_trust_ref} + ) trust = self.assertValidTrustResponse(resp) trust_token = self._get_trust_token(trust) resp = self.get( - '/OS-TRUST/trusts/%(trust_id)s/roles' % { - 'trust_id': trust['id']}, - token=trust_token) + '/OS-TRUST/trusts/%(trust_id)s/roles' % {'trust_id': trust['id']}, + token=trust_token, + ) self.assertValidRoleListResponse(resp, self.role) self.head( - '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % { - 'trust_id': trust['id'], - 'role_id': self.role['id']}, + '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' + % {'trust_id': trust['id'], 'role_id': self.role['id']}, token=trust_token, - 
expected_status=http.client.OK) + expected_status=http.client.OK, + ) resp = self.get( - '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % { - 'trust_id': trust['id'], - 'role_id': self.role['id']}, - token=trust_token) + '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' + % {'trust_id': trust['id'], 'role_id': self.role['id']}, + token=trust_token, + ) self.assertValidRoleResponse(resp, self.role) def test_do_not_consume_remaining_uses_when_get_token_fails(self): @@ -4817,7 +5358,8 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): impersonation=False, expires=dict(minutes=1), role_ids=[self.role_id], - remaining_uses=3) + remaining_uses=3, + ) r = self.post('/OS-TRUST/trusts', body={'trust': ref}) new_trust = r.result.get('trust') @@ -4828,9 +5370,9 @@ class TrustAPIBehavior(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=self.default_domain_user['id'], password=self.default_domain_user['password'], - trust_id=trust_id) - self.v3_create_token(auth_data, - expected_status=http.client.FORBIDDEN) + trust_id=trust_id, + ) + self.v3_create_token(auth_data, expected_status=http.client.FORBIDDEN) r = self.get('/OS-TRUST/trusts/%s' % trust_id) self.assertEqual(3, r.result.get('trust').get('remaining_uses')) @@ -4841,9 +5383,7 @@ class TestTrustChain(test_v3.RestfulTestCase): def config_overrides(self): super(TestTrustChain, self).config_overrides() self.config_fixture.config( - group='trust', - allow_redelegation=True, - max_redelegation_count=10 + group='trust', allow_redelegation=True, max_redelegation_count=10 ) def setUp(self): @@ -4868,8 +5408,9 @@ class TestTrustChain(test_v3.RestfulTestCase): self.user_list = list() self.trust_chain = list() for _ in range(3): - user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain_id) + user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain_id + ) self.user_list.append(user) # trustor->trustee redelegation with impersonation @@ -4882,17 +5423,18 @@ class 
TestTrustChain(test_v3.RestfulTestCase): expires=dict(minutes=1), role_ids=[self.role_id], allow_redelegation=True, - redelegation_count=3) + redelegation_count=3, + ) # Create a trust between self.user and the first user in the list - r = self.post('/OS-TRUST/trusts', - body={'trust': trust_ref}) + r = self.post('/OS-TRUST/trusts', body={'trust': trust_ref}) trust = self.assertValidTrustResponse(r) auth_data = self.build_authentication_request( user_id=trustee['id'], password=trustee['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) # Generate a trusted token for the first user trust_token = self.get_requested_token(auth_data) @@ -4906,15 +5448,19 @@ class TestTrustChain(test_v3.RestfulTestCase): project_id=self.project_id, impersonation=True, role_ids=[self.role_id], - allow_redelegation=True) - r = self.post('/OS-TRUST/trusts', - body={'trust': trust_ref}, - token=trust_token) + allow_redelegation=True, + ) + r = self.post( + '/OS-TRUST/trusts', + body={'trust': trust_ref}, + token=trust_token, + ) trust = self.assertValidTrustResponse(r) auth_data = self.build_authentication_request( user_id=next_trustee['id'], password=next_trustee['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) trust_token = self.get_requested_token(auth_data) self.trust_chain.append(trust) @@ -4923,14 +5469,14 @@ class TestTrustChain(test_v3.RestfulTestCase): auth_data = self.build_authentication_request( user_id=trustee['id'], password=trustee['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) self.last_token = self.get_requested_token(auth_data) def assert_user_authenticate(self, user): auth_data = self.build_authentication_request( - user_id=user['id'], - password=user['password'] + user_id=user['id'], password=user['password'] ) r = self.v3_create_token(auth_data) self.assertValidTokenResponse(r) @@ -4938,8 +5484,7 @@ class TestTrustChain(test_v3.RestfulTestCase): def assert_trust_tokens_revoked(self, trust_id): trustee = self.user_list[0] 
auth_data = self.build_authentication_request( - user_id=trustee['id'], - password=trustee['password'] + user_id=trustee['id'], password=trustee['password'] ) r = self.v3_create_token(auth_data) self.assertValidTokenResponse(r) @@ -4950,37 +5495,48 @@ class TestTrustChain(test_v3.RestfulTestCase): for event in revocation_events: if event.get('OS-TRUST:trust_id') == trust_id: found = True - self.assertTrue(found, 'event with trust_id %s not found in list' % - trust_id) + self.assertTrue( + found, 'event with trust_id %s not found in list' % trust_id + ) def test_delete_trust_cascade(self): self.assert_user_authenticate(self.user_list[0]) - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': self.trust_chain[0]['id']}) + self.delete( + '/OS-TRUST/trusts/%(trust_id)s' + % {'trust_id': self.trust_chain[0]['id']} + ) headers = {'X-Subject-Token': self.last_token} - self.head('/auth/tokens', headers=headers, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers=headers, + expected_status=http.client.NOT_FOUND, + ) self.assert_trust_tokens_revoked(self.trust_chain[0]['id']) def test_delete_broken_chain(self): self.assert_user_authenticate(self.user_list[0]) - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': self.trust_chain[0]['id']}) + self.delete( + '/OS-TRUST/trusts/%(trust_id)s' + % {'trust_id': self.trust_chain[0]['id']} + ) # Verify the two remaining trust have been deleted for i in range(len(self.user_list) - 1): auth_data = self.build_authentication_request( user_id=self.user_list[i]['id'], - password=self.user_list[i]['password']) + password=self.user_list[i]['password'], + ) auth_token = self.get_requested_token(auth_data) # Assert chained trust have been deleted - self.get('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': self.trust_chain[i + 1]['id']}, + self.get( + '/OS-TRUST/trusts/%(trust_id)s' + % {'trust_id': self.trust_chain[i + 1]['id']}, token=auth_token, - expected_status=http.client.NOT_FOUND) + 
expected_status=http.client.NOT_FOUND, + ) def test_trustor_roles_revoked(self): self.assert_user_authenticate(self.user_list[0]) @@ -4993,18 +5549,19 @@ class TestTrustChain(test_v3.RestfulTestCase): for i in range(len(self.user_list[1:])): trustee = self.user_list[i] auth_data = self.build_authentication_request( - user_id=trustee['id'], - password=trustee['password']) + user_id=trustee['id'], password=trustee['password'] + ) # Attempt to authenticate with trust token = self.get_requested_token(auth_data) auth_data = self.build_authentication_request( - token=token, - trust_id=self.trust_chain[i - 1]['id']) + token=token, trust_id=self.trust_chain[i - 1]['id'] + ) # Trustee has no delegated roles - self.v3_create_token(auth_data, - expected_status=http.client.FORBIDDEN) + self.v3_create_token( + auth_data, expected_status=http.client.FORBIDDEN + ) def test_intermediate_user_disabled(self): self.assert_user_authenticate(self.user_list[0]) @@ -5016,8 +5573,11 @@ class TestTrustChain(test_v3.RestfulTestCase): # Bypass policy enforcement with mock.patch.object(policy, 'enforce', return_value=True): headers = {'X-Subject-Token': self.last_token} - self.head('/auth/tokens', headers=headers, - expected_status=http.client.FORBIDDEN) + self.head( + '/auth/tokens', + headers=headers, + expected_status=http.client.FORBIDDEN, + ) def test_intermediate_user_deleted(self): self.assert_user_authenticate(self.user_list[0]) @@ -5028,8 +5588,11 @@ class TestTrustChain(test_v3.RestfulTestCase): # Delete trustee will invalidate the trust. 
with mock.patch.object(policy, 'enforce', return_value=True): headers = {'X-Subject-Token': self.last_token} - self.head('/auth/tokens', headers=headers, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers=headers, + expected_status=http.client.NOT_FOUND, + ) class TestAuthContext(unit.TestCase): @@ -5039,8 +5602,9 @@ class TestAuthContext(unit.TestCase): def test_pick_lowest_expires_at(self): expires_at_1 = utils.isotime(timeutils.utcnow()) - expires_at_2 = utils.isotime(timeutils.utcnow() + - datetime.timedelta(seconds=10)) + expires_at_2 = utils.isotime( + timeutils.utcnow() + datetime.timedelta(seconds=10) + ) # make sure auth_context picks the lowest value self.auth_context['expires_at'] = expires_at_1 self.auth_context['expires_at'] = expires_at_2 @@ -5053,11 +5617,13 @@ class TestAuthContext(unit.TestCase): # 'expires_at' is a special case. Will test it in a separate # test case. continue - self.assertRaises(exception.Unauthorized, - operator.setitem, - self.auth_context, - identity_attr, - uuid.uuid4().hex) + self.assertRaises( + exception.Unauthorized, + operator.setitem, + self.auth_context, + identity_attr, + uuid.uuid4().hex, + ) def test_identity_attribute_conflict_with_none_value(self): for identity_attr in auth.core.AuthContext.IDENTITY_ATTRIBUTES: @@ -5068,11 +5634,13 @@ class TestAuthContext(unit.TestCase): self.auth_context['expires_at'] = uuid.uuid4().hex continue - self.assertRaises(exception.Unauthorized, - operator.setitem, - self.auth_context, - identity_attr, - uuid.uuid4().hex) + self.assertRaises( + exception.Unauthorized, + operator.setitem, + self.auth_context, + identity_attr, + uuid.uuid4().hex, + ) def test_non_identity_attribute_conflict_override(self): # for attributes Keystone doesn't know about, make sure they can be @@ -5099,30 +5667,38 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): def test_get_catalog_with_domain_scoped_token(self): """Call ``GET /auth/catalog`` with a domain-scoped 
token.""" # grant a domain role to a user - self.put(path='/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id'])) + self.put( + path='/domains/%s/users/%s/roles/%s' + % (self.domain['id'], self.user['id'], self.role['id']) + ) self.get( '/auth/catalog', auth=self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain['id']), - expected_status=http.client.FORBIDDEN) + domain_id=self.domain['id'], + ), + expected_status=http.client.FORBIDDEN, + ) def test_head_catalog_with_domain_scoped_token(self): """Call ``HEAD /auth/catalog`` with a domain-scoped token.""" # grant a domain role to a user - self.put(path='/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id'])) + self.put( + path='/domains/%s/users/%s/roles/%s' + % (self.domain['id'], self.user['id'], self.role['id']) + ) self.head( '/auth/catalog', auth=self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain['id']), - expected_status=http.client.FORBIDDEN) + domain_id=self.domain['id'], + ), + expected_status=http.client.FORBIDDEN, + ) def test_get_catalog_with_unscoped_token(self): """Call ``GET /auth/catalog`` with an unscoped token.""" @@ -5130,8 +5706,10 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): '/auth/catalog', auth=self.build_authentication_request( user_id=self.default_domain_user['id'], - password=self.default_domain_user['password']), - expected_status=http.client.FORBIDDEN) + password=self.default_domain_user['password'], + ), + expected_status=http.client.FORBIDDEN, + ) def test_head_catalog_with_unscoped_token(self): """Call ``HEAD /auth/catalog`` with an unscoped token.""" @@ -5139,15 +5717,17 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): '/auth/catalog', auth=self.build_authentication_request( user_id=self.default_domain_user['id'], - password=self.default_domain_user['password']), - 
expected_status=http.client.FORBIDDEN) + password=self.default_domain_user['password'], + ), + expected_status=http.client.FORBIDDEN, + ) def test_get_catalog_no_token(self): """Call ``GET /auth/catalog`` without a token.""" self.get( '/auth/catalog', noauth=True, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) def test_head_catalog_no_token(self): @@ -5155,7 +5735,7 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): self.head( '/auth/catalog', noauth=True, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) def test_get_projects_with_project_scoped_token(self): @@ -5202,7 +5782,7 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): path = '/domains/%(domain_id)s/users/%(user_id)s/roles/%(role_id)s' % { 'domain_id': authorized_domain_id, 'user_id': self.user_id, - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path, expected_status=http.client.NO_CONTENT) @@ -5220,23 +5800,27 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): self.assertNotEqual(unauthorized_domain_id, r.json['domains'][0]['id']) def test_get_domains_with_project_scoped_token(self): - self.put(path='/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id'])) + self.put( + path='/domains/%s/users/%s/roles/%s' + % (self.domain['id'], self.user['id'], self.role['id']) + ) r = self.get('/auth/domains', expected_status=http.client.OK) self.assertThat(r.json['domains'], matchers.HasLength(1)) self.assertValidDomainListResponse(r) def test_head_domains_with_project_scoped_token(self): - self.put(path='/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id'])) + self.put( + path='/domains/%s/users/%s/roles/%s' + % (self.domain['id'], self.user['id'], self.role['id']) + ) self.head('/auth/domains', expected_status=http.client.OK) def test_get_system_roles_with_unscoped_token(self): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 
'user_id': self.user['id'], - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path=path) @@ -5249,8 +5833,9 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): response = self.get('/auth/system', token=unscoped_token) self.assertTrue(response.json_body['system'][0]['all']) self.head( - '/auth/system', token=unscoped_token, - expected_status=http.client.OK + '/auth/system', + token=unscoped_token, + expected_status=http.client.OK, ) def test_get_system_roles_returns_empty_list_without_system_roles(self): @@ -5266,13 +5851,15 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): response = self.get('/auth/system', token=unscoped_token) self.assertEqual(response.json_body['system'], []) self.head( - '/auth/system', token=unscoped_token, - expected_status=http.client.OK + '/auth/system', + token=unscoped_token, + expected_status=http.client.OK, ) project_scoped_request = self.build_authentication_request( - user_id=self.user['id'], password=self.user['password'], - project_id=self.project_id + user_id=self.user['id'], + password=self.user['password'], + project_id=self.project_id, ) r = self.post('/auth/tokens', body=project_scoped_request) project_scoped_token = r.headers.get('X-Subject-Token') @@ -5280,23 +5867,27 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): response = self.get('/auth/system', token=project_scoped_token) self.assertEqual(response.json_body['system'], []) self.head( - '/auth/system', token=project_scoped_token, - expected_status=http.client.OK + '/auth/system', + token=project_scoped_token, + expected_status=http.client.OK, ) def test_get_system_roles_with_project_scoped_token(self): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path=path) - self.put(path='/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id'])) + self.put( + path='/domains/%s/users/%s/roles/%s' + % (self.domain['id'], 
self.user['id'], self.role['id']) + ) domain_scoped_request = self.build_authentication_request( - user_id=self.user['id'], password=self.user['password'], - domain_id=self.domain['id'] + user_id=self.user['id'], + password=self.user['password'], + domain_id=self.domain['id'], ) r = self.post('/auth/tokens', body=domain_scoped_request) domain_scoped_token = r.headers.get('X-Subject-Token') @@ -5304,20 +5895,22 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): response = self.get('/auth/system', token=domain_scoped_token) self.assertTrue(response.json_body['system'][0]['all']) self.head( - '/auth/system', token=domain_scoped_token, - expected_status=http.client.OK + '/auth/system', + token=domain_scoped_token, + expected_status=http.client.OK, ) def test_get_system_roles_with_domain_scoped_token(self): path = '/system/users/%(user_id)s/roles/%(role_id)s' % { 'user_id': self.user['id'], - 'role_id': self.role_id + 'role_id': self.role_id, } self.put(path=path) project_scoped_request = self.build_authentication_request( - user_id=self.user['id'], password=self.user['password'], - project_id=self.project_id + user_id=self.user['id'], + password=self.user['password'], + project_id=self.project_id, ) r = self.post('/auth/tokens', body=project_scoped_request) project_scoped_token = r.headers.get('X-Subject-Token') @@ -5325,23 +5918,24 @@ class TestAuthSpecificData(test_v3.RestfulTestCase): response = self.get('/auth/system', token=project_scoped_token) self.assertTrue(response.json_body['system'][0]['all']) self.head( - '/auth/system', token=project_scoped_token, - expected_status=http.client.OK + '/auth/system', + token=project_scoped_token, + expected_status=http.client.OK, ) class TestTrustAuthFernetTokenProvider(TrustAPIBehavior, TestTrustChain): def config_overrides(self): super(TestTrustAuthFernetTokenProvider, self).config_overrides() - self.config_fixture.config(group='token', - provider='fernet', - revoke_by_id=False) + self.config_fixture.config( + 
group='token', provider='fernet', revoke_by_id=False + ) self.config_fixture.config(group='trust') self.useFixture( ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) @@ -5354,13 +5948,14 @@ class TestAuthTOTP(test_v3.RestfulTestCase): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_fernet.MAX_ACTIVE_KEYS + credential_fernet.MAX_ACTIVE_KEYS, ) ) ref = unit.new_totp_credential( user_id=self.default_domain_user['id'], - project_id=self.default_domain_project['id']) + project_id=self.default_domain_project['id'], + ) self.secret = ref['blob'] @@ -5373,8 +5968,9 @@ class TestAuthTOTP(test_v3.RestfulTestCase): methods = ['totp', 'token', 'password'] super(TestAuthTOTP, self).auth_plugin_config_override(methods) - def _make_credentials(self, cred_type, count=1, user_id=None, - project_id=None, blob=None): + def _make_credentials( + self, cred_type, count=1, user_id=None, project_id=None, blob=None + ): user_id = user_id or self.default_domain_user['id'] project_id = project_id or self.default_domain_project['id'] @@ -5382,10 +5978,12 @@ class TestAuthTOTP(test_v3.RestfulTestCase): for __ in range(count): if cred_type == 'totp': ref = unit.new_totp_credential( - user_id=user_id, project_id=project_id, blob=blob) + user_id=user_id, project_id=project_id, blob=blob + ) else: ref = unit.new_credential_ref( - user_id=user_id, project_id=project_id) + user_id=user_id, project_id=project_id + ) resp = self.post('/credentials', body={'credential': ref}) creds.append(resp.json['credential']) return creds @@ -5394,25 +5992,31 @@ class TestAuthTOTP(test_v3.RestfulTestCase): return self.build_authentication_request( user_id=user_id or self.default_domain_user['id'], passcode=passcode, - project_id=self.project['id']) + project_id=self.project['id'], + ) def _make_auth_data_by_name(self, passcode, username, user_domain_id): return self.build_authentication_request( 
username=username, user_domain_id=user_domain_id, passcode=passcode, - project_id=self.project['id']) + project_id=self.project['id'], + ) def cleanup(self): totp_creds = PROVIDERS.credential_api.list_credentials_for_user( - self.default_domain_user['id'], type='totp') + self.default_domain_user['id'], type='totp' + ) other_creds = PROVIDERS.credential_api.list_credentials_for_user( - self.default_domain_user['id'], type='other') + self.default_domain_user['id'], type='other' + ) for cred in itertools.chain(other_creds, totp_creds): - self.delete('/credentials/%s' % cred['id'], - expected_status=http.client.NO_CONTENT) + self.delete( + '/credentials/%s' % cred['id'], + expected_status=http.client.NO_CONTENT, + ) def test_with_a_valid_passcode(self): creds = self._make_credentials('totp') @@ -5424,7 +6028,8 @@ class TestAuthTOTP(test_v3.RestfulTestCase): self.useFixture(fixture.TimeFixture()) auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcodes(secret)[0]) + totp._generate_totp_passcodes(secret)[0] + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) @@ -5435,43 +6040,46 @@ class TestAuthTOTP(test_v3.RestfulTestCase): past = datetime.datetime.utcnow() - datetime.timedelta(minutes=2) with freezegun.freeze_time(past): auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcodes(secret)[0]) + totp._generate_totp_passcodes(secret)[0] + ) # Stop the clock otherwise there is a chance of accidental success due # to getting a different TOTP between the call here and the call in the # auth plugin. 
self.useFixture(fixture.TimeFixture()) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_with_an_expired_passcode_no_previous_windows(self): - self.config_fixture.config(group='totp', - included_previous_windows=0) + self.config_fixture.config(group='totp', included_previous_windows=0) creds = self._make_credentials('totp') secret = creds[-1]['blob'] past = datetime.datetime.utcnow() - datetime.timedelta(seconds=30) with freezegun.freeze_time(past): auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcodes(secret)[0]) + totp._generate_totp_passcodes(secret)[0] + ) # Stop the clock otherwise there is a chance of accidental success due # to getting a different TOTP between the call here and the call in the # auth plugin. self.useFixture(fixture.TimeFixture()) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_with_passcode_no_previous_windows(self): - self.config_fixture.config(group='totp', - included_previous_windows=0) + self.config_fixture.config(group='totp', included_previous_windows=0) creds = self._make_credentials('totp') secret = creds[-1]['blob'] auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcodes(secret)[0]) + totp._generate_totp_passcodes(secret)[0] + ) # Stop the clock otherwise there is a chance of auth failure due to # getting a different TOTP between the call here and the call in the @@ -5488,7 +6096,8 @@ class TestAuthTOTP(test_v3.RestfulTestCase): past = datetime.datetime.utcnow() - datetime.timedelta(seconds=30) with freezegun.freeze_time(past): auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcodes(secret)[0]) + totp._generate_totp_passcodes(secret)[0] + ) # Stop the clock otherwise there is a chance of auth failure due to # getting a different TOTP between the 
call here and the call in the @@ -5498,15 +6107,15 @@ class TestAuthTOTP(test_v3.RestfulTestCase): self.v3_create_token(auth_data, expected_status=http.client.CREATED) def test_with_passcode_in_previous_windows_extended(self): - self.config_fixture.config(group='totp', - included_previous_windows=4) + self.config_fixture.config(group='totp', included_previous_windows=4) creds = self._make_credentials('totp') secret = creds[-1]['blob'] past = datetime.datetime.utcnow() - datetime.timedelta(minutes=2) self.useFixture(fixture.TimeFixture(past)) auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcodes(secret)[0]) + totp._generate_totp_passcodes(secret)[0] + ) # Stop the clock otherwise there is a chance of auth failure due to # getting a different TOTP between the call here and the call in the @@ -5518,19 +6127,22 @@ class TestAuthTOTP(test_v3.RestfulTestCase): def test_with_an_invalid_passcode_and_user_credentials(self): self._make_credentials('totp') auth_data = self._make_auth_data_by_id('000000') - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_with_an_invalid_passcode_with_no_user_credentials(self): auth_data = self._make_auth_data_by_id('000000') - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_with_a_corrupt_totp_credential(self): self._make_credentials('totp', count=1, blob='0') auth_data = self._make_auth_data_by_id('000000') - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_with_multiple_credentials(self): self._make_credentials('other', 3) @@ -5543,7 +6155,8 @@ class TestAuthTOTP(test_v3.RestfulTestCase): self.useFixture(fixture.TimeFixture()) auth_data = self._make_auth_data_by_id( - 
totp._generate_totp_passcodes(secret)[0]) + totp._generate_totp_passcodes(secret)[0] + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) def test_with_multiple_users(self): @@ -5566,7 +6179,8 @@ class TestAuthTOTP(test_v3.RestfulTestCase): self.useFixture(fixture.TimeFixture()) auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcodes(secret)[0], user_id=user['id']) + totp._generate_totp_passcodes(secret)[0], user_id=user['id'] + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) def test_with_multiple_users_and_invalid_credentials(self): @@ -5579,22 +6193,27 @@ class TestAuthTOTP(test_v3.RestfulTestCase): self._make_credentials('totp', count=3) # create a new user and their credentials - new_user = unit.create_user(PROVIDERS.identity_api, - domain_id=self.domain_id) + new_user = unit.create_user( + PROVIDERS.identity_api, domain_id=self.domain_id + ) PROVIDERS.assignment_api.create_grant( - self.role['id'], user_id=new_user['id'], - project_id=self.project['id'] + self.role['id'], + user_id=new_user['id'], + project_id=self.project['id'], ) user2_creds = self._make_credentials( - 'totp', count=1, user_id=new_user['id']) + 'totp', count=1, user_id=new_user['id'] + ) user_id = self.default_domain_user['id'] # user1 secret = user2_creds[-1]['blob'] auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcodes(secret)[0], user_id=user_id) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + totp._generate_totp_passcodes(secret)[0], user_id=user_id + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_with_username_and_domain_id(self): creds = self._make_credentials('totp') @@ -5608,7 +6227,8 @@ class TestAuthTOTP(test_v3.RestfulTestCase): auth_data = self._make_auth_data_by_name( totp._generate_totp_passcodes(secret)[0], username=self.default_domain_user['name'], - user_domain_id=self.default_domain_user['domain_id']) + 
user_domain_id=self.default_domain_user['domain_id'], + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) @@ -5632,7 +6252,7 @@ class TestFetchRevocationList(test_v3.RestfulTestCase): # can be returned. self.get( '/auth/tokens/OS-PKI/revoked', - expected_status=http.client.FORBIDDEN + expected_status=http.client.FORBIDDEN, ) def test_head_ids_no_tokens_returns_forbidden(self): @@ -5641,7 +6261,7 @@ class TestFetchRevocationList(test_v3.RestfulTestCase): # can be returned. self.head( '/auth/tokens/OS-PKI/revoked', - expected_status=http.client.FORBIDDEN + expected_status=http.client.FORBIDDEN, ) @@ -5654,7 +6274,8 @@ class ApplicationCredentialAuth(test_v3.RestfulTestCase): def config_overrides(self): super(ApplicationCredentialAuth, self).config_overrides() self.auth_plugin_config_override( - methods=['application_credential', 'password', 'token']) + methods=['application_credential', 'password', 'token'] + ) def _make_app_cred(self, expires=None, access_rules=None): roles = [{'id': self.role_id}] @@ -5665,7 +6286,7 @@ class ApplicationCredentialAuth(test_v3.RestfulTestCase): 'user_id': self.user['id'], 'project_id': self.project['id'], 'description': uuid.uuid4().hex, - 'roles': roles + 'roles': roles, } if expires: data['expires_at'] = expires @@ -5673,41 +6294,46 @@ class ApplicationCredentialAuth(test_v3.RestfulTestCase): data['access_rules'] = access_rules return data - def _validate_token(self, token, headers=None, - expected_status=http.client.OK): + def _validate_token( + self, token, headers=None, expected_status=http.client.OK + ): path = '/v3/auth/tokens' headers = headers or {} headers.update({'X-Auth-Token': token, 'X-Subject-Token': token}) with self.test_client() as c: - resp = c.get(path, headers=headers, - expected_status_code=expected_status) + resp = c.get( + path, headers=headers, expected_status_code=expected_status + ) return resp def test_valid_application_credential_succeeds(self): app_cred = self._make_app_cred() 
app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) def test_validate_application_credential_token_populates_restricted(self): self.config_fixture.config(group='token', cache_on_issue=False) app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) auth_response = self.v3_create_token( - auth_data, expected_status=http.client.CREATED) + auth_data, expected_status=http.client.CREATED + ) self.assertTrue( auth_response.json['token']['application_credential']['restricted'] ) token_id = auth_response.headers.get('X-Subject-Token') headers = {'X-Auth-Token': token_id, 'X-Subject-Token': token_id} - validate_response = self.get( - '/auth/tokens', headers=headers - ).json_body + validate_response = self.get('/auth/tokens', headers=headers).json_body self.assertTrue( validate_response['token']['application_credential']['restricted'] ) @@ -5715,113 +6341,142 @@ class ApplicationCredentialAuth(test_v3.RestfulTestCase): def test_valid_application_credential_with_name_succeeds(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_name=app_cred_ref['name'], secret=app_cred_ref['secret'], - user_id=self.user['id']) + app_cred_name=app_cred_ref['name'], + secret=app_cred_ref['secret'], + user_id=self.user['id'], + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) def 
test_valid_application_credential_name_and_username_succeeds(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_name=app_cred_ref['name'], secret=app_cred_ref['secret'], - username=self.user['name'], user_domain_id=self.user['domain_id']) + app_cred_name=app_cred_ref['name'], + secret=app_cred_ref['secret'], + username=self.user['name'], + user_domain_id=self.user['domain_id'], + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) def test_application_credential_with_invalid_secret_fails(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret='badsecret') - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + app_cred_id=app_cred_ref['id'], secret='badsecret' + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_unexpired_application_credential_succeeds(self): expires_at = datetime.datetime.utcnow() + datetime.timedelta(minutes=1) app_cred = self._make_app_cred(expires=expires_at) app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) def test_expired_application_credential_fails(self): expires_at = datetime.datetime.utcnow() + datetime.timedelta(minutes=1) app_cred = self._make_app_cred(expires=expires_at) app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) 
+ app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) future = datetime.datetime.utcnow() + datetime.timedelta(minutes=2) with freezegun.freeze_time(future): - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_application_credential_expiration_limits_token_expiration(self): expires_at = datetime.datetime.utcnow() + datetime.timedelta(minutes=1) app_cred = self._make_app_cred(expires=expires_at) app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) - resp = self.v3_create_token(auth_data, - expected_status=http.client.CREATED) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) + resp = self.v3_create_token( + auth_data, expected_status=http.client.CREATED + ) token = resp.headers.get('X-Subject-Token') future = datetime.datetime.utcnow() + datetime.timedelta(minutes=2) with freezegun.freeze_time(future): - self._validate_token(token, - expected_status=http.client.UNAUTHORIZED) + self._validate_token( + token, expected_status=http.client.UNAUTHORIZED + ) def test_application_credential_fails_when_user_deleted(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) PROVIDERS.identity_api.delete_user(self.user['id']) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) self.v3_create_token(auth_data, expected_status=http.client.NOT_FOUND) def test_application_credential_fails_when_user_disabled(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) - PROVIDERS.identity_api.update_user(self.user['id'], - {'enabled': 
False}) + app_cred + ) + PROVIDERS.identity_api.update_user(self.user['id'], {'enabled': False}) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_application_credential_fails_when_project_deleted(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) PROVIDERS.resource_api.delete_project(self.project['id']) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) self.v3_create_token(auth_data, expected_status=http.client.NOT_FOUND) def test_application_credential_fails_when_role_deleted(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) PROVIDERS.role_api.delete_role(self.role_id) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) self.v3_create_token(auth_data, expected_status=http.client.NOT_FOUND) def test_application_credential_fails_when_role_unassigned(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) PROVIDERS.assignment_api.remove_role_from_user_and_project( - self.user['id'], self.project['id'], - self.role_id) + self.user['id'], self.project['id'], self.role_id + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) self.v3_create_token(auth_data, 
expected_status=http.client.NOT_FOUND) def test_application_credential_through_group_membership(self): @@ -5832,9 +6487,7 @@ class ApplicationCredentialAuth(test_v3.RestfulTestCase): group1 = unit.new_group_ref(domain_id=self.domain_id) group1 = PROVIDERS.identity_api.create_group(group1) - PROVIDERS.identity_api.add_user_to_group( - user1['id'], group1['id'] - ) + PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id']) PROVIDERS.assignment_api.create_grant( self.role_id, group_id=group1['id'], project_id=self.project_id ) @@ -5846,40 +6499,49 @@ class ApplicationCredentialAuth(test_v3.RestfulTestCase): 'user_id': user1['id'], 'project_id': self.project_id, 'description': uuid.uuid4().hex, - 'roles': [{'id': self.role_id}] + 'roles': [{'id': self.role_id}], } app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) self.v3_create_token(auth_data, expected_status=http.client.CREATED) def test_application_credential_cannot_scope(self): app_cred = self._make_app_cred() app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) new_project_ref = unit.new_project_ref(domain_id=self.domain_id) # Create a new project and assign the user a valid role on it new_project = PROVIDERS.resource_api.create_project( - new_project_ref['id'], new_project_ref) + new_project_ref['id'], new_project_ref + ) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user['id'], new_project['id'], self.role_id) + self.user['id'], new_project['id'], self.role_id + ) # Check that a password auth would work password_auth = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - project_id=new_project['id']) + project_id=new_project['id'], + ) password_response = self.v3_create_token(password_auth) 
self.assertValidProjectScopedTokenResponse(password_response) # Should not be able to use that scope with an application credential # even though the user has a valid assignment on it app_cred_auth = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'], - project_id=new_project['id']) - self.v3_create_token(app_cred_auth, - expected_status=http.client.UNAUTHORIZED) + app_cred_id=app_cred_ref['id'], + secret=app_cred_ref['secret'], + project_id=new_project['id'], + ) + self.v3_create_token( + app_cred_auth, expected_status=http.client.UNAUTHORIZED + ) def test_application_credential_with_access_rules(self): access_rules = [ @@ -5892,11 +6554,14 @@ class ApplicationCredentialAuth(test_v3.RestfulTestCase): ] app_cred = self._make_app_cred(access_rules=access_rules) app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) - resp = self.v3_create_token(auth_data, - expected_status=http.client.CREATED) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) + resp = self.v3_create_token( + auth_data, expected_status=http.client.CREATED + ) token = resp.headers.get('X-Subject-Token') headers = {'OpenStack-Identity-Access-Rules': '1.0'} self._validate_token(token, headers=headers) @@ -5912,10 +6577,13 @@ class ApplicationCredentialAuth(test_v3.RestfulTestCase): ] app_cred = self._make_app_cred(access_rules=access_rules) app_cred_ref = self.app_cred_api.create_application_credential( - app_cred) + app_cred + ) auth_data = self.build_authentication_request( - app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret']) - resp = self.v3_create_token(auth_data, - expected_status=http.client.CREATED) + app_cred_id=app_cred_ref['id'], secret=app_cred_ref['secret'] + ) + resp = self.v3_create_token( + auth_data, expected_status=http.client.CREATED + ) token = 
resp.headers.get('X-Subject-Token') self._validate_token(token, expected_status=http.client.NOT_FOUND) diff --git a/keystone/tests/unit/test_v3_catalog.py b/keystone/tests/unit/test_v3_catalog.py index a5b8b04aa2..076b2ad5b5 100644 --- a/keystone/tests/unit/test_v3_catalog.py +++ b/keystone/tests/unit/test_v3_catalog.py @@ -39,7 +39,8 @@ class CatalogTestCase(test_v3.RestfulTestCase): r = self.put( '/regions/%s' % region_id, body={'region': ref}, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) self.assertValidRegionResponse(r, ref) # Double-check that the region ID was kept as-is and not # populated with a UUID, as is the case with POST /v3/regions @@ -52,7 +53,8 @@ class CatalogTestCase(test_v3.RestfulTestCase): r = self.put( '/regions/%s' % region_id, body={'region': ref}, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) self.assertValidRegionResponse(r, ref) # Double-check that the region ID was kept as-is and not # populated with a UUID, as is the case with POST /v3/regions @@ -64,25 +66,25 @@ class CatalogTestCase(test_v3.RestfulTestCase): region_id = ref['id'] self.put( '/regions/%s' % region_id, - body={'region': ref}, expected_status=http.client.CREATED) + body={'region': ref}, + expected_status=http.client.CREATED, + ) # Create region again with duplicate id self.put( '/regions/%s' % region_id, - body={'region': ref}, expected_status=http.client.CONFLICT) + body={'region': ref}, + expected_status=http.client.CONFLICT, + ) def test_create_region(self): """Call ``POST /regions`` with an ID in the request body.""" # the ref will have an ID defined on it ref = unit.new_region_ref() - r = self.post( - '/regions', - body={'region': ref}) + r = self.post('/regions', body={'region': ref}) self.assertValidRegionResponse(r, ref) # we should be able to get the region, having defined the ID ourselves - r = self.get( - '/regions/%(region_id)s' % { - 'region_id': ref['id']}) + r = 
self.get('/regions/%(region_id)s' % {'region_id': ref['id']}) self.assertValidRegionResponse(r, ref) def test_create_region_with_empty_id(self): @@ -163,7 +165,8 @@ class CatalogTestCase(test_v3.RestfulTestCase): self.put( '/regions/%s' % uuid.uuid4().hex, body={'region': ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_list_head_regions(self): """Call ``GET & HEAD /regions``.""" @@ -174,9 +177,7 @@ class CatalogTestCase(test_v3.RestfulTestCase): def _create_region_with_parent_id(self, parent_id=None): ref = unit.new_region_ref(parent_region_id=parent_id) - return self.post( - '/regions', - body={'region': ref}) + return self.post('/regions', body={'region': ref}) def test_list_regions_filtered_by_parent_region_id(self): """Call ``GET /regions?parent_region_id={parent_region_id}``.""" @@ -193,8 +194,7 @@ class CatalogTestCase(test_v3.RestfulTestCase): def test_get_head_region(self): """Call ``GET & HEAD /regions/{region_id}``.""" - resource_url = '/regions/%(region_id)s' % { - 'region_id': self.region_id} + resource_url = '/regions/%(region_id)s' % {'region_id': self.region_id} r = self.get(resource_url) self.assertValidRegionResponse(r, self.region) self.head(resource_url, expected_status=http.client.OK) @@ -203,9 +203,10 @@ class CatalogTestCase(test_v3.RestfulTestCase): """Call ``PATCH /regions/{region_id}``.""" region = unit.new_region_ref() del region['id'] - r = self.patch('/regions/%(region_id)s' % { - 'region_id': self.region_id}, - body={'region': region}) + r = self.patch( + '/regions/%(region_id)s' % {'region_id': self.region_id}, + body={'region': region}, + ) self.assertValidRegionResponse(r, region) def test_update_region_without_description_keeps_original(self): @@ -218,20 +219,23 @@ class CatalogTestCase(test_v3.RestfulTestCase): # update with something that's not the description 'parent_region_id': self.region_id, } - resp = self.patch('/regions/%s' % region_ref['id'], - body={'region': 
region_updates}) + resp = self.patch( + '/regions/%s' % region_ref['id'], body={'region': region_updates} + ) # NOTE(dstanek): Keystone should keep the original description. - self.assertEqual(region_ref['description'], - resp.result['region']['description']) + self.assertEqual( + region_ref['description'], resp.result['region']['description'] + ) def test_update_region_with_null_description(self): """Call ``PATCH /regions/{region_id}``.""" region = unit.new_region_ref(description=None) del region['id'] - r = self.patch('/regions/%(region_id)s' % { - 'region_id': self.region_id}, - body={'region': region}) + r = self.patch( + '/regions/%(region_id)s' % {'region_id': self.region_id}, + body={'region': region}, + ) # NOTE(dstanek): Keystone should turn the provided None value into # an empty string before storing in the backend. @@ -241,31 +245,24 @@ class CatalogTestCase(test_v3.RestfulTestCase): def test_delete_region(self): """Call ``DELETE /regions/{region_id}``.""" ref = unit.new_region_ref() - r = self.post( - '/regions', - body={'region': ref}) + r = self.post('/regions', body={'region': ref}) self.assertValidRegionResponse(r, ref) - self.delete('/regions/%(region_id)s' % { - 'region_id': ref['id']}) + self.delete('/regions/%(region_id)s' % {'region_id': ref['id']}) # service crud tests def test_create_service(self): """Call ``POST /services``.""" ref = unit.new_service_ref() - r = self.post( - '/services', - body={'service': ref}) + r = self.post('/services', body={'service': ref}) self.assertValidServiceResponse(r, ref) def test_create_service_no_name(self): """Call ``POST /services``.""" ref = unit.new_service_ref() del ref['name'] - r = self.post( - '/services', - body={'service': ref}) + r = self.post('/services', body={'service': ref}) ref['name'] = '' self.assertValidServiceResponse(r, ref) @@ -273,9 +270,7 @@ class CatalogTestCase(test_v3.RestfulTestCase): """Call ``POST /services``.""" ref = unit.new_service_ref() del ref['enabled'] - r = self.post( - 
'/services', - body={'service': ref}) + r = self.post('/services', body={'service': ref}) ref['enabled'] = True self.assertValidServiceResponse(r, ref) self.assertIs(True, r.result['service']['enabled']) @@ -283,38 +278,43 @@ class CatalogTestCase(test_v3.RestfulTestCase): def test_create_service_enabled_false(self): """Call ``POST /services``.""" ref = unit.new_service_ref(enabled=False) - r = self.post( - '/services', - body={'service': ref}) + r = self.post('/services', body={'service': ref}) self.assertValidServiceResponse(r, ref) self.assertIs(False, r.result['service']['enabled']) def test_create_service_enabled_true(self): """Call ``POST /services``.""" ref = unit.new_service_ref(enabled=True) - r = self.post( - '/services', - body={'service': ref}) + r = self.post('/services', body={'service': ref}) self.assertValidServiceResponse(r, ref) self.assertIs(True, r.result['service']['enabled']) def test_create_service_enabled_str_true(self): """Call ``POST /services``.""" ref = unit.new_service_ref(enabled='True') - self.post('/services', body={'service': ref}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/services', + body={'service': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_service_enabled_str_false(self): """Call ``POST /services``.""" ref = unit.new_service_ref(enabled='False') - self.post('/services', body={'service': ref}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/services', + body={'service': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_service_enabled_str_random(self): """Call ``POST /services``.""" ref = unit.new_service_ref(enabled='puppies') - self.post('/services', body={'service': ref}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/services', + body={'service': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_list_head_services(self): """Call ``GET & HEAD /services``.""" @@ -325,9 +325,7 @@ class CatalogTestCase(test_v3.RestfulTestCase): 
def _create_random_service(self): ref = unit.new_service_ref() - response = self.post( - '/services', - body={'service': ref}) + response = self.post('/services', body={'service': ref}) return response.json['service'] def test_filter_list_services_by_type(self): @@ -374,7 +372,8 @@ class CatalogTestCase(test_v3.RestfulTestCase): def test_get_head_service(self): """Call ``GET & HEAD /services/{service_id}``.""" resource_url = '/services/%(service_id)s' % { - 'service_id': self.service_id} + 'service_id': self.service_id + } r = self.get(resource_url) self.assertValidServiceResponse(r, self.service) self.head(resource_url, expected_status=http.client.OK) @@ -383,15 +382,17 @@ class CatalogTestCase(test_v3.RestfulTestCase): """Call ``PATCH /services/{service_id}``.""" service = unit.new_service_ref() del service['id'] - r = self.patch('/services/%(service_id)s' % { - 'service_id': self.service_id}, - body={'service': service}) + r = self.patch( + '/services/%(service_id)s' % {'service_id': self.service_id}, + body={'service': service}, + ) self.assertValidServiceResponse(r, service) def test_delete_service(self): """Call ``DELETE /services/{service_id}``.""" - self.delete('/services/%(service_id)s' % { - 'service_id': self.service_id}) + self.delete( + '/services/%(service_id)s' % {'service_id': self.service_id} + ) # endpoint crud tests @@ -402,19 +403,18 @@ class CatalogTestCase(test_v3.RestfulTestCase): self.assertValidEndpointListResponse(r, ref=self.endpoint) self.head(resource_url, expected_status=http.client.OK) - def _create_random_endpoint(self, interface='public', - parent_region_id=None): - region = self._create_region_with_parent_id( - parent_id=parent_region_id) + def _create_random_endpoint( + self, interface='public', parent_region_id=None + ): + region = self._create_region_with_parent_id(parent_id=parent_region_id) service = self._create_random_service() ref = unit.new_endpoint_ref( service_id=service['id'], interface=interface, - 
region_id=region.result['region']['id']) + region_id=region.result['region']['id'], + ) - response = self.post( - '/endpoints', - body={'endpoint': ref}) + response = self.post('/endpoints', body={'endpoint': ref}) return response.json['endpoint'] def test_list_endpoints_filtered_by_interface(self): @@ -470,8 +470,10 @@ class CatalogTestCase(test_v3.RestfulTestCase): """ # interface and region_id specified ref = self._create_random_endpoint(interface='internal') - response = self.get('/endpoints?interface=%s®ion_id=%s' % - (ref['interface'], ref['region_id'])) + response = self.get( + '/endpoints?interface=%s®ion_id=%s' + % (ref['interface'], ref['region_id']) + ) self.assertValidEndpointListResponse(response, ref=ref) for endpoint in response.json['endpoints']: @@ -480,8 +482,10 @@ class CatalogTestCase(test_v3.RestfulTestCase): # interface and service_id specified ref = self._create_random_endpoint(interface='internal') - response = self.get('/endpoints?interface=%s&service_id=%s' % - (ref['interface'], ref['service_id'])) + response = self.get( + '/endpoints?interface=%s&service_id=%s' + % (ref['interface'], ref['service_id']) + ) self.assertValidEndpointListResponse(response, ref=ref) for endpoint in response.json['endpoints']: @@ -490,8 +494,10 @@ class CatalogTestCase(test_v3.RestfulTestCase): # region_id and service_id specified ref = self._create_random_endpoint(interface='internal') - response = self.get('/endpoints?region_id=%s&service_id=%s' % - (ref['region_id'], ref['service_id'])) + response = self.get( + '/endpoints?region_id=%s&service_id=%s' + % (ref['region_id'], ref['service_id']) + ) self.assertValidEndpointListResponse(response, ref=ref) for endpoint in response.json['endpoints']: @@ -500,10 +506,10 @@ class CatalogTestCase(test_v3.RestfulTestCase): # interface, region_id and service_id specified ref = self._create_random_endpoint(interface='internal') - response = self.get(('/endpoints?interface=%s®ion_id=%s' - '&service_id=%s') % - 
(ref['interface'], ref['region_id'], - ref['service_id'])) + response = self.get( + ('/endpoints?interface=%s®ion_id=%s' '&service_id=%s') + % (ref['interface'], ref['region_id'], ref['service_id']) + ) self.assertValidEndpointListResponse(response, ref=ref) for endpoint in response.json['endpoints']: @@ -531,71 +537,96 @@ class CatalogTestCase(test_v3.RestfulTestCase): def test_create_endpoint_no_enabled(self): """Call ``POST /endpoints``.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) + ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + ) r = self.post('/endpoints', body={'endpoint': ref}) ref['enabled'] = True self.assertValidEndpointResponse(r, ref) def test_create_endpoint_enabled_true(self): """Call ``POST /endpoints`` with enabled: true.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled=True) + ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + enabled=True, + ) r = self.post('/endpoints', body={'endpoint': ref}) self.assertValidEndpointResponse(r, ref) def test_create_endpoint_enabled_false(self): """Call ``POST /endpoints`` with enabled: false.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled=False) + ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + enabled=False, + ) r = self.post('/endpoints', body={'endpoint': ref}) self.assertValidEndpointResponse(r, ref) def test_create_endpoint_enabled_str_true(self): """Call ``POST /endpoints`` with enabled: 'True'.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled='True') - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http.client.BAD_REQUEST) 
+ ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + enabled='True', + ) + self.post( + '/endpoints', + body={'endpoint': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_endpoint_enabled_str_false(self): """Call ``POST /endpoints`` with enabled: 'False'.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled='False') - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http.client.BAD_REQUEST) + ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + enabled='False', + ) + self.post( + '/endpoints', + body={'endpoint': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_endpoint_enabled_str_random(self): """Call ``POST /endpoints`` with enabled: 'puppies'.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled='puppies') - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http.client.BAD_REQUEST) + ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + enabled='puppies', + ) + self.post( + '/endpoints', + body={'endpoint': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_endpoint_with_invalid_region_id(self): """Call ``POST /endpoints``.""" ref = unit.new_endpoint_ref(service_id=self.service_id) - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/endpoints', + body={'endpoint': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_endpoint_with_region(self): """EndpointV3 creates the region before creating the endpoint. This occurs when endpoint is provided with 'region' and no 'region_id'. 
""" - ref = unit.new_endpoint_ref_with_region(service_id=self.service_id, - region=uuid.uuid4().hex) + ref = unit.new_endpoint_ref_with_region( + service_id=self.service_id, region=uuid.uuid4().hex + ) self.post('/endpoints', body={'endpoint': ref}) # Make sure the region is created self.get('/regions/%(region_id)s' % {'region_id': ref["region"]}) @@ -609,44 +640,50 @@ class CatalogTestCase(test_v3.RestfulTestCase): def test_create_endpoint_with_empty_url(self): """Call ``POST /endpoints``.""" ref = unit.new_endpoint_ref(service_id=self.service_id, url='') - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/endpoints', + body={'endpoint': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_get_head_endpoint(self): """Call ``GET & HEAD /endpoints/{endpoint_id}``.""" resource_url = '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id} + 'endpoint_id': self.endpoint_id + } r = self.get(resource_url) self.assertValidEndpointResponse(r, self.endpoint) self.head(resource_url, expected_status=http.client.OK) def test_update_endpoint(self): """Call ``PATCH /endpoints/{endpoint_id}``.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) + ref = unit.new_endpoint_ref( + service_id=self.service_id, + interface='public', + region_id=self.region_id, + ) del ref['id'] r = self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': ref}) + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint_id}, + body={'endpoint': ref}, + ) ref['enabled'] = True self.assertValidEndpointResponse(r, ref) def test_update_endpoint_enabled_true(self): """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: True.""" r = self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': {'enabled': True}}) + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint_id}, + 
body={'endpoint': {'enabled': True}}, + ) self.assertValidEndpointResponse(r, self.endpoint) def test_update_endpoint_enabled_false(self): """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: False.""" r = self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': {'enabled': False}}) + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint_id}, + body={'endpoint': {'enabled': False}}, + ) exp_endpoint = copy.copy(self.endpoint) exp_endpoint['enabled'] = False self.assertValidEndpointResponse(r, exp_endpoint) @@ -654,44 +691,46 @@ class CatalogTestCase(test_v3.RestfulTestCase): def test_update_endpoint_enabled_str_true(self): """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'True'.""" self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint_id}, body={'endpoint': {'enabled': 'True'}}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_update_endpoint_enabled_str_false(self): """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'False'.""" self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint_id}, body={'endpoint': {'enabled': 'False'}}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_update_endpoint_enabled_str_random(self): """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'kitties'.""" self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint_id}, body={'endpoint': {'enabled': 'kitties'}}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_delete_endpoint(self): """Call ``DELETE /endpoints/{endpoint_id}``.""" self.delete( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': 
self.endpoint_id}) + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint_id} + ) def test_deleting_endpoint_with_space_in_url(self): # add a space to all urls (intentional "i d" to test bug) url_with_space = "http://127.0.0.1:8774 /v1.1/\\$(tenant_i d)s" # create a v3 endpoint ref - ref = unit.new_endpoint_ref(service_id=self.service['id'], - region_id=None, - publicurl=url_with_space, - internalurl=url_with_space, - adminurl=url_with_space, - url=url_with_space) + ref = unit.new_endpoint_ref( + service_id=self.service['id'], + region_id=None, + publicurl=url_with_space, + internalurl=url_with_space, + adminurl=url_with_space, + url=url_with_space, + ) # add the endpoint to the database PROVIDERS.catalog_api.create_endpoint(ref['id'], ref) @@ -700,28 +739,33 @@ class CatalogTestCase(test_v3.RestfulTestCase): self.delete('/endpoints/%s' % ref['id']) # make sure it's deleted (GET should return Not Found) - self.get('/endpoints/%s' % ref['id'], - expected_status=http.client.NOT_FOUND) + self.get( + '/endpoints/%s' % ref['id'], expected_status=http.client.NOT_FOUND + ) def test_endpoint_create_with_valid_url(self): """Create endpoint with valid url should be tested,too.""" # list one valid url is enough, no need to list too much valid_url = 'http://127.0.0.1:8774/v1.1/$(project_id)s' - ref = unit.new_endpoint_ref(self.service_id, - interface='public', - region_id=self.region_id, - url=valid_url) + ref = unit.new_endpoint_ref( + self.service_id, + interface='public', + region_id=self.region_id, + url=valid_url, + ) self.post('/endpoints', body={'endpoint': ref}) def test_endpoint_create_with_valid_url_project_id(self): """Create endpoint with valid url should be tested,too.""" valid_url = 'http://127.0.0.1:8774/v1.1/$(project_id)s' - ref = unit.new_endpoint_ref(self.service_id, - interface='public', - region_id=self.region_id, - url=valid_url) + ref = unit.new_endpoint_ref( + self.service_id, + interface='public', + region_id=self.region_id, + url=valid_url, + 
) self.post('/endpoints', body={'endpoint': ref}) def test_endpoint_create_with_invalid_url(self): @@ -729,12 +773,10 @@ class CatalogTestCase(test_v3.RestfulTestCase): invalid_urls = [ # using a substitution that is not whitelisted - KeyError 'http://127.0.0.1:8774/v1.1/$(nonexistent)s', - # invalid formatting - ValueError 'http://127.0.0.1:8774/v1.1/$(project_id)', 'http://127.0.0.1:8774/v1.1/$(project_id)t', 'http://127.0.0.1:8774/v1.1/$(project_id', - # invalid type specifier - TypeError # admin_url is a string not an int 'http://127.0.0.1:8774/v1.1/$(admin_url)d', @@ -744,9 +786,11 @@ class CatalogTestCase(test_v3.RestfulTestCase): for invalid_url in invalid_urls: ref['url'] = invalid_url - self.post('/endpoints', - body={'endpoint': ref}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/endpoints', + body={'endpoint': ref}, + expected_status=http.client.BAD_REQUEST, + ) class TestMultiRegion(test_v3.RestfulTestCase): @@ -755,24 +799,20 @@ class TestMultiRegion(test_v3.RestfulTestCase): # Create two separate regions first_region = self.post( - '/regions', - body={'region': unit.new_region_ref()} + '/regions', body={'region': unit.new_region_ref()} ).json_body['region'] second_region = self.post( - '/regions', - body={'region': unit.new_region_ref()} + '/regions', body={'region': unit.new_region_ref()} ).json_body['region'] # Create two services with the same type but separate name. 
first_service = self.post( - '/services', - body={'service': unit.new_service_ref(type='foobar')} + '/services', body={'service': unit.new_service_ref(type='foobar')} ).json_body['service'] second_service = self.post( - '/services', - body={'service': unit.new_service_ref(type='foobar')} + '/services', body={'service': unit.new_service_ref(type='foobar')} ).json_body['service'] # Create an endpoint for each service @@ -782,9 +822,9 @@ class TestMultiRegion(test_v3.RestfulTestCase): 'endpoint': unit.new_endpoint_ref( first_service['id'], interface='public', - region_id=first_region['id'] + region_id=first_region['id'], ) - } + }, ).json_body['endpoint'] second_endpoint = self.post( @@ -793,9 +833,9 @@ class TestMultiRegion(test_v3.RestfulTestCase): 'endpoint': unit.new_endpoint_ref( second_service['id'], interface='public', - region_id=second_region['id'] + region_id=second_region['id'], ) - } + }, ).json_body['endpoint'] # Assert the endpoints and services from each region are in the @@ -834,11 +874,13 @@ class TestCatalogAPISQL(unit.TestCase): self.create_endpoint(service_id=self.service_id) PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) def create_endpoint(self, service_id, **kwargs): - endpoint = unit.new_endpoint_ref(service_id=service_id, - region_id=None, **kwargs) + endpoint = unit.new_endpoint_ref( + service_id=service_id, region_id=None, **kwargs + ) PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) return endpoint @@ -865,12 +907,14 @@ class TestCatalogAPISQL(unit.TestCase): self.assertEqual(1, len(PROVIDERS.catalog_api.list_endpoints())) # create a new, invalid endpoint - malformed type declaration - self.create_endpoint(self.service_id, - url='http://keystone/%(project_id)') + self.create_endpoint( + self.service_id, url='http://keystone/%(project_id)' + ) # create a new, invalid endpoint - nonexistent key - 
self.create_endpoint(self.service_id, - url='http://keystone/%(you_wont_find_me)s') + self.create_endpoint( + self.service_id, url='http://keystone/%(you_wont_find_me)s' + ) # verify that the invalid endpoints don't appear in the catalog catalog = PROVIDERS.catalog_api.get_v3_catalog(user_id, project['id']) @@ -879,8 +923,9 @@ class TestCatalogAPISQL(unit.TestCase): self.assertEqual(3, len(PROVIDERS.catalog_api.list_endpoints())) # create another valid endpoint - project_id will be replaced - self.create_endpoint(self.service_id, - url='http://keystone/%(project_id)s') + self.create_endpoint( + self.service_id, url='http://keystone/%(project_id)s' + ) # there are two valid endpoints, positive check catalog = PROVIDERS.catalog_api.get_v3_catalog(user_id, project['id']) @@ -915,12 +960,14 @@ class TestCatalogAPISQL(unit.TestCase): catalog = PROVIDERS.catalog_api.get_v3_catalog(user_id, project['id']) - named_endpoint = [ep for ep in catalog - if ep['type'] == named_svc['type']][0] + named_endpoint = [ + ep for ep in catalog if ep['type'] == named_svc['type'] + ][0] self.assertEqual(named_svc['name'], named_endpoint['name']) - unnamed_endpoint = [ep for ep in catalog - if ep['type'] == unnamed_svc['type']][0] + unnamed_endpoint = [ + ep for ep in catalog if ep['type'] == unnamed_svc['type'] + ][0] self.assertEqual('', unnamed_endpoint['name']) @@ -934,7 +981,8 @@ class TestCatalogAPISQLRegions(unit.TestCase): self.useFixture(database.Database()) self.load_backends() PROVIDERS.resource_api.create_domain( - default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN) + default_fixtures.ROOT_DOMAIN['id'], default_fixtures.ROOT_DOMAIN + ) def config_overrides(self): super(TestCatalogAPISQLRegions, self).config_overrides() @@ -945,8 +993,7 @@ class TestCatalogAPISQLRegions(unit.TestCase): service_id = service['id'] PROVIDERS.catalog_api.create_service(service_id, service) - endpoint = unit.new_endpoint_ref(service_id=service_id, - region_id=None) + endpoint = 
unit.new_endpoint_ref(service_id=service_id, region_id=None) del endpoint['region_id'] PROVIDERS.catalog_api.create_endpoint(endpoint['id'], endpoint) @@ -960,7 +1007,8 @@ class TestCatalogAPISQLRegions(unit.TestCase): user_id = uuid.uuid4().hex catalog = PROVIDERS.catalog_api.get_v3_catalog(user_id, project['id']) self.assertValidCatalogEndpoint( - catalog[0]['endpoints'][0], ref=endpoint) + catalog[0]['endpoints'][0], ref=endpoint + ) def test_get_catalog_returns_proper_endpoints_with_region(self): service = unit.new_service_ref() @@ -984,7 +1032,8 @@ class TestCatalogAPISQLRegions(unit.TestCase): catalog = PROVIDERS.catalog_api.get_v3_catalog(user_id, project['id']) self.assertValidCatalogEndpoint( - catalog[0]['endpoints'][0], ref=endpoint) + catalog[0]['endpoints'][0], ref=endpoint + ) def assertValidCatalogEndpoint(self, entity, ref=None): keys = ['description', 'id', 'interface', 'name', 'region_id', 'url'] @@ -1022,5 +1071,5 @@ class TestCatalogAPITemplatedProject(test_v3.RestfulTestCase): templated catalog, there is no testing to do for that action. 
""" self.delete( - '/projects/%(project_id)s' % { - 'project_id': self.project_id}) + '/projects/%(project_id)s' % {'project_id': self.project_id} + ) diff --git a/keystone/tests/unit/test_v3_credential.py b/keystone/tests/unit/test_v3_credential.py index 6573f4402a..875bc907e3 100644 --- a/keystone/tests/unit/test_v3_credential.py +++ b/keystone/tests/unit/test_v3_credential.py @@ -46,13 +46,14 @@ class CredentialBaseTestCase(test_v3.RestfulTestCase): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_fernet.MAX_ACTIVE_KEYS + credential_fernet.MAX_ACTIVE_KEYS, ) ) def _create_dict_blob_credential(self): - blob, credential = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) + blob, credential = unit.new_ec2_credential( + user_id=self.user['id'], project_id=self.project_id + ) # Store the blob as a dict *not* JSON ref bug #1259584 # This means we can test the dict->json workaround, added @@ -68,27 +69,34 @@ class CredentialBaseTestCase(test_v3.RestfulTestCase): def _test_get_token(self, access, secret): """Test signature validation with the access/secret provided.""" signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - request = {'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} + params = { + 'SignatureMethod': 'HmacSHA256', + 'SignatureVersion': '2', + 'AWSAccessKeyId': access, + } + request = { + 'host': 'foo', + 'verb': 'GET', + 'path': '/bar', + 'params': params, + } signature = signer.generate(request) # Now make a request to validate the signed dummy request via the # ec2tokens API. This proves the v3 ec2 credentials actually work. 
- sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} + sig_ref = { + 'access': access, + 'signature': signature, + 'host': 'foo', + 'verb': 'GET', + 'path': '/bar', + 'params': params, + } r = self.post( '/ec2tokens', body={'ec2Credentials': sig_ref}, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) self.assertValidTokenResponse(r) return r.result['token'] @@ -100,12 +108,13 @@ class CredentialTestCase(CredentialBaseTestCase): super(CredentialTestCase, self).setUp() - self.credential = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id) + self.credential = unit.new_credential_ref( + user_id=self.user['id'], project_id=self.project_id + ) PROVIDERS.credential_api.create_credential( - self.credential['id'], - self.credential) + self.credential['id'], self.credential + ) def test_credential_api_delete_credentials_for_project(self): PROVIDERS.credential_api.delete_credentials_for_project( @@ -113,17 +122,21 @@ class CredentialTestCase(CredentialBaseTestCase): ) # Test that the credential that we created in .setUp no longer exists # once we delete all credentials for self.project_id - self.assertRaises(exception.CredentialNotFound, - PROVIDERS.credential_api.get_credential, - credential_id=self.credential['id']) + self.assertRaises( + exception.CredentialNotFound, + PROVIDERS.credential_api.get_credential, + credential_id=self.credential['id'], + ) def test_credential_api_delete_credentials_for_user(self): PROVIDERS.credential_api.delete_credentials_for_user(self.user_id) # Test that the credential that we created in .setUp no longer exists # once we delete all credentials for self.user_id - self.assertRaises(exception.CredentialNotFound, - PROVIDERS.credential_api.get_credential, - credential_id=self.credential['id']) + self.assertRaises( + exception.CredentialNotFound, + PROVIDERS.credential_api.get_credential, + 
credential_id=self.credential['id'], + ) def test_list_credentials(self): """Call ``GET /credentials``.""" @@ -151,12 +164,15 @@ class CredentialTestCase(CredentialBaseTestCase): # The type ec2 was chosen, instead of a random string, # because the type must be in the list of supported types - ec2_credential = unit.new_credential_ref(user_id=uuid.uuid4().hex, - project_id=self.project_id, - type=CRED_TYPE_EC2) + ec2_credential = unit.new_credential_ref( + user_id=uuid.uuid4().hex, + project_id=self.project_id, + type=CRED_TYPE_EC2, + ) ec2_resp = PROVIDERS.credential_api.create_credential( - ec2_credential['id'], ec2_credential) + ec2_credential['id'], ec2_credential + ) # The type cert was chosen for the same reason as ec2 r = self.get('/credentials?type=cert', token=token) @@ -185,17 +201,21 @@ class CredentialTestCase(CredentialBaseTestCase): token = self.get_system_scoped_token() # Creating credentials for two different users - credential_user1_ec2 = unit.new_credential_ref(user_id=user1_id, - type=CRED_TYPE_EC2) + credential_user1_ec2 = unit.new_credential_ref( + user_id=user1_id, type=CRED_TYPE_EC2 + ) credential_user1_cert = unit.new_credential_ref(user_id=user1_id) credential_user2_cert = unit.new_credential_ref(user_id=user2_id) PROVIDERS.credential_api.create_credential( - credential_user1_ec2['id'], credential_user1_ec2) + credential_user1_ec2['id'], credential_user1_ec2 + ) PROVIDERS.credential_api.create_credential( - credential_user1_cert['id'], credential_user1_cert) + credential_user1_cert['id'], credential_user1_cert + ) PROVIDERS.credential_api.create_credential( - credential_user2_cert['id'], credential_user2_cert) + credential_user2_cert['id'], credential_user2_cert + ) r = self.get( '/credentials?user_id=%s&type=ec2' % user1_id, token=token @@ -209,53 +229,51 @@ class CredentialTestCase(CredentialBaseTestCase): def test_create_credential(self): """Call ``POST /credentials``.""" ref = unit.new_credential_ref(user_id=self.user['id']) - r = 
self.post( - '/credentials', - body={'credential': ref}) + r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) def test_get_credential(self): """Call ``GET /credentials/{credential_id}``.""" r = self.get( - '/credentials/%(credential_id)s' % { - 'credential_id': self.credential['id']}) + '/credentials/%(credential_id)s' + % {'credential_id': self.credential['id']} + ) self.assertValidCredentialResponse(r, self.credential) def test_update_credential(self): """Call ``PATCH /credentials/{credential_id}``.""" - ref = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id) + ref = unit.new_credential_ref( + user_id=self.user['id'], project_id=self.project_id + ) del ref['id'] r = self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': self.credential['id']}, - body={'credential': ref}) + '/credentials/%(credential_id)s' + % {'credential_id': self.credential['id']}, + body={'credential': ref}, + ) self.assertValidCredentialResponse(r, ref) def test_update_credential_to_ec2_type(self): """Call ``PATCH /credentials/{credential_id}``.""" # Create a credential without providing a project_id ref = unit.new_credential_ref(user_id=self.user['id']) - r = self.post( - '/credentials', - body={'credential': ref}) + r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) credential_id = r.result.get('credential')['id'] # Updating the credential to ec2 requires a project_id update_ref = {'type': 'ec2', 'project_id': self.project_id} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, - body={'credential': update_ref}) + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, + body={'credential': update_ref}, + ) def test_update_credential_to_ec2_missing_project_id(self): """Call ``PATCH /credentials/{credential_id}``.""" # Create a credential without providing a project_id ref = 
unit.new_credential_ref(user_id=self.user['id']) - r = self.post( - '/credentials', - body={'credential': ref}) + r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) credential_id = r.result.get('credential')['id'] @@ -263,19 +281,19 @@ class CredentialTestCase(CredentialBaseTestCase): # will fail update_ref = {'type': 'ec2'} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_update_credential_to_ec2_with_previously_set_project_id(self): """Call ``PATCH /credentials/{credential_id}``.""" # Create a credential providing a project_id - ref = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id) - r = self.post( - '/credentials', - body={'credential': ref}) + ref = unit.new_credential_ref( + user_id=self.user['id'], project_id=self.project_id + ) + r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) credential_id = r.result.get('credential')['id'] @@ -283,167 +301,175 @@ class CredentialTestCase(CredentialBaseTestCase): # update request will not fail update_ref = {'type': 'ec2'} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, - body={'credential': update_ref}) + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, + body={'credential': update_ref}, + ) def test_update_credential_non_owner(self): """Call ``PATCH /credentials/{credential_id}``.""" alt_user = unit.create_user( - PROVIDERS.identity_api, domain_id=self.domain_id) + PROVIDERS.identity_api, domain_id=self.domain_id + ) alt_user_id = alt_user['id'] alt_project = unit.new_project_ref(domain_id=self.domain_id) alt_project_id = alt_project['id'] - PROVIDERS.resource_api.create_project( - 
alt_project['id'], alt_project) + PROVIDERS.resource_api.create_project(alt_project['id'], alt_project) alt_role = unit.new_role_ref(name='reader') alt_role_id = alt_role['id'] PROVIDERS.role_api.create_role(alt_role_id, alt_role) PROVIDERS.assignment_api.add_role_to_user_and_project( - alt_user_id, alt_project_id, alt_role_id) + alt_user_id, alt_project_id, alt_role_id + ) auth = self.build_authentication_request( user_id=alt_user_id, password=alt_user['password'], - project_id=alt_project_id) - ref = unit.new_credential_ref(user_id=alt_user_id, - project_id=alt_project_id) - r = self.post( - '/credentials', - auth=auth, - body={'credential': ref}) + project_id=alt_project_id, + ) + ref = unit.new_credential_ref( + user_id=alt_user_id, project_id=alt_project_id + ) + r = self.post('/credentials', auth=auth, body={'credential': ref}) self.assertValidCredentialResponse(r, ref) credential_id = r.result.get('credential')['id'] # Cannot change the credential to be owned by another user update_ref = {'user_id': self.user_id, 'project_id': self.project_id} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, expected_status=403, auth=auth, - body={'credential': update_ref}) + body={'credential': update_ref}, + ) def test_update_ec2_credential_change_trust_id(self): """Call ``PATCH /credentials/{credential_id}``.""" - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) + blob, ref = unit.new_ec2_credential( + user_id=self.user['id'], project_id=self.project_id + ) blob['trust_id'] = uuid.uuid4().hex ref['blob'] = json.dumps(blob) - r = self.post( - '/credentials', - body={'credential': ref}) + r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) credential_id = r.result.get('credential')['id'] # Try changing to a different trust blob['trust_id'] = uuid.uuid4().hex update_ref = 
{'blob': json.dumps(blob)} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) # Try removing the trust del blob['trust_id'] update_ref = {'blob': json.dumps(blob)} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_update_ec2_credential_change_app_cred_id(self): """Call ``PATCH /credentials/{credential_id}``.""" - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) + blob, ref = unit.new_ec2_credential( + user_id=self.user['id'], project_id=self.project_id + ) blob['app_cred_id'] = uuid.uuid4().hex ref['blob'] = json.dumps(blob) - r = self.post( - '/credentials', - body={'credential': ref}) + r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) credential_id = r.result.get('credential')['id'] # Try changing to a different app cred blob['app_cred_id'] = uuid.uuid4().hex update_ref = {'blob': json.dumps(blob)} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) # Try removing the app cred del blob['app_cred_id'] update_ref = {'blob': json.dumps(blob)} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + 
expected_status=http.client.BAD_REQUEST, + ) def test_update_ec2_credential_change_access_token_id(self): """Call ``PATCH /credentials/{credential_id}``.""" - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) + blob, ref = unit.new_ec2_credential( + user_id=self.user['id'], project_id=self.project_id + ) blob['access_token_id'] = uuid.uuid4().hex ref['blob'] = json.dumps(blob) - r = self.post( - '/credentials', - body={'credential': ref}) + r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) credential_id = r.result.get('credential')['id'] # Try changing to a different access token blob['access_token_id'] = uuid.uuid4().hex update_ref = {'blob': json.dumps(blob)} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) # Try removing the access token del blob['access_token_id'] update_ref = {'blob': json.dumps(blob)} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_update_ec2_credential_change_access_id(self): """Call ``PATCH /credentials/{credential_id}``.""" - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) + blob, ref = unit.new_ec2_credential( + user_id=self.user['id'], project_id=self.project_id + ) blob['access_id'] = uuid.uuid4().hex ref['blob'] = json.dumps(blob) - r = self.post( - '/credentials', - body={'credential': ref}) + r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) credential_id = 
r.result.get('credential')['id'] # Try changing to a different access_id blob['access_id'] = uuid.uuid4().hex update_ref = {'blob': json.dumps(blob)} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) # Try removing the access_id del blob['access_id'] update_ref = {'blob': json.dumps(blob)} self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}, + '/credentials/%(credential_id)s' + % {'credential_id': credential_id}, body={'credential': update_ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_delete_credential(self): """Call ``DELETE /credentials/{credential_id}``.""" self.delete( - '/credentials/%(credential_id)s' % { - 'credential_id': self.credential['id']}) + '/credentials/%(credential_id)s' + % {'credential_id': self.credential['id']} + ) def test_delete_credential_retries_on_deadlock(self): - patcher = mock.patch('sqlalchemy.orm.query.Query.delete', - autospec=True) + patcher = mock.patch( + 'sqlalchemy.orm.query.Query.delete', autospec=True + ) class FakeDeadlock(object): def __init__(self, mock_patcher): @@ -465,7 +491,8 @@ class CredentialTestCase(CredentialBaseTestCase): try: PROVIDERS.credential_api.delete_credentials_for_user( - user_id=self.user['id']) + user_id=self.user['id'] + ) finally: if side_effect.patched: patcher.stop() @@ -475,33 +502,39 @@ class CredentialTestCase(CredentialBaseTestCase): def test_create_ec2_credential(self): """Call ``POST /credentials`` for creating ec2 credential.""" - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) + blob, ref = unit.new_ec2_credential( + user_id=self.user['id'], project_id=self.project_id + ) r = self.post('/credentials', body={'credential': ref}) 
self.assertValidCredentialResponse(r, ref) # Assert credential id is same as hash of access key id for # ec2 credentials access = blob['access'].encode('utf-8') - self.assertEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) + self.assertEqual( + hashlib.sha256(access).hexdigest(), r.result['credential']['id'] + ) # Create second ec2 credential with the same access key id and check # for conflict. self.post( '/credentials', - body={'credential': ref}, expected_status=http.client.CONFLICT) + body={'credential': ref}, + expected_status=http.client.CONFLICT, + ) def test_get_ec2_dict_blob(self): """Ensure non-JSON blob data is correctly converted.""" expected_blob, credential_id = self._create_dict_blob_credential() r = self.get( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}) + '/credentials/%(credential_id)s' % {'credential_id': credential_id} + ) # use json.loads to transform the blobs back into Python dictionaries # to avoid problems with the keys being in different orders. - self.assertEqual(json.loads(expected_blob), - json.loads(r.result['credential']['blob'])) + self.assertEqual( + json.loads(expected_blob), + json.loads(r.result['credential']['blob']), + ) def test_list_ec2_dict_blob(self): """Ensure non-JSON blob data is correctly converted.""" @@ -515,8 +548,9 @@ class CredentialTestCase(CredentialBaseTestCase): # to avoid problems with the keys being in different orders. for r in list_creds: if r['id'] == credential_id: - self.assertEqual(json.loads(expected_blob), - json.loads(r['blob'])) + self.assertEqual( + json.loads(expected_blob), json.loads(r['blob']) + ) def test_create_non_ec2_credential(self): """Test creating non-ec2 credential. 
@@ -530,34 +564,42 @@ class CredentialTestCase(CredentialBaseTestCase): # Assert credential id is not same as hash of access key id for # non-ec2 credentials access = blob['access'].encode('utf-8') - self.assertNotEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) + self.assertNotEqual( + hashlib.sha256(access).hexdigest(), r.result['credential']['id'] + ) def test_create_ec2_credential_with_missing_project_id(self): """Test Creating ec2 credential with missing project_id. Call ``POST /credentials``. """ - _, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=None) + _, ref = unit.new_ec2_credential( + user_id=self.user['id'], project_id=None + ) # Assert bad request status when missing project_id self.post( '/credentials', - body={'credential': ref}, expected_status=http.client.BAD_REQUEST) + body={'credential': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_ec2_credential_with_invalid_blob(self): """Test creating ec2 credential with invalid blob. Call ``POST /credentials``. 
""" - ref = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id, - blob='{"abc":"def"d}', - type=CRED_TYPE_EC2) + ref = unit.new_credential_ref( + user_id=self.user['id'], + project_id=self.project_id, + blob='{"abc":"def"d}', + type=CRED_TYPE_EC2, + ) # Assert bad request status when request contains invalid blob response = self.post( '/credentials', - body={'credential': ref}, expected_status=http.client.BAD_REQUEST) + body={'credential': ref}, + expected_status=http.client.BAD_REQUEST, + ) self.assertValidErrorResponse(response) def test_create_credential_with_admin_token(self): @@ -566,7 +608,8 @@ class CredentialTestCase(CredentialBaseTestCase): r = self.post( '/credentials', body={'credential': ref}, - token=self.get_admin_token()) + token=self.get_admin_token(), + ) self.assertValidCredentialResponse(r, ref) @@ -587,7 +630,7 @@ class TestCredentialTrustScoped(CredentialBaseTestCase): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_fernet.MAX_ACTIVE_KEYS + credential_fernet.MAX_ACTIVE_KEYS, ) ) @@ -607,7 +650,8 @@ class TestCredentialTrustScoped(CredentialBaseTestCase): project_id=self.project_id, impersonation=True, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) del ref['id'] r = self.post('/OS-TRUST/trusts', body={'trust': ref}) trust = self.assertValidTrustResponse(r) @@ -616,15 +660,17 @@ class TestCredentialTrustScoped(CredentialBaseTestCase): auth_data = self.build_authentication_request( user_id=self.trustee_user['id'], password=self.trustee_user['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) r = self.v3_create_token(auth_data) self.assertValidProjectScopedTokenResponse(r, self.user) trust_id = r.result['token']['OS-TRUST:trust']['id'] token_id = r.headers.get('X-Subject-Token') # Create the credential with the trust scoped token - blob, ref = unit.new_ec2_credential(user_id=self.user_id, - project_id=self.project_id) + blob, ref = 
unit.new_ec2_credential( + user_id=self.user_id, project_id=self.project_id + ) r = self.post('/credentials', body={'credential': ref}, token=token_id) # We expect the response blob to contain the trust_id @@ -637,8 +683,9 @@ class TestCredentialTrustScoped(CredentialBaseTestCase): # Assert credential id is same as hash of access key id for # ec2 credentials access = blob['access'].encode('utf-8') - self.assertEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) + self.assertEqual( + hashlib.sha256(access).hexdigest(), r.result['credential']['id'] + ) # Create a role assignment to ensure that it is ignored and only the # trust-delegated roles are used @@ -646,11 +693,13 @@ class TestCredentialTrustScoped(CredentialBaseTestCase): role_id = role['id'] PROVIDERS.role_api.create_role(role_id, role) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_id, self.project_id, role_id) + self.user_id, self.project_id, role_id + ) ret_blob = json.loads(r.result['credential']['blob']) ec2token = self._test_get_token( - access=ret_blob['access'], secret=ret_blob['secret']) + access=ret_blob['access'], secret=ret_blob['secret'] + ) ec2_roles = [role['id'] for role in ec2token['roles']] self.assertIn(self.role_id, ec2_roles) self.assertNotIn(role_id, ec2_roles) @@ -661,7 +710,8 @@ class TestCredentialTrustScoped(CredentialBaseTestCase): '/credentials', body={'credential': ref}, token=token_id, - expected_status=http.client.CONFLICT) + expected_status=http.client.CONFLICT, + ) class TestCredentialAppCreds(CredentialBaseTestCase): @@ -673,7 +723,7 @@ class TestCredentialAppCreds(CredentialBaseTestCase): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_fernet.MAX_ACTIVE_KEYS + credential_fernet.MAX_ACTIVE_KEYS, ) ) @@ -685,20 +735,23 @@ class TestCredentialAppCreds(CredentialBaseTestCase): # Create the app cred ref = unit.new_application_credential_ref(roles=[{'id': self.role_id}]) del ref['id'] - r = 
self.post('/users/%s/application_credentials' % self.user_id, - body={'application_credential': ref}) + r = self.post( + '/users/%s/application_credentials' % self.user_id, + body={'application_credential': ref}, + ) app_cred = r.result['application_credential'] # Get an application credential token auth_data = self.build_authentication_request( - app_cred_id=app_cred['id'], - secret=app_cred['secret']) + app_cred_id=app_cred['id'], secret=app_cred['secret'] + ) r = self.v3_create_token(auth_data) token_id = r.headers.get('X-Subject-Token') # Create the credential with the app cred token - blob, ref = unit.new_ec2_credential(user_id=self.user_id, - project_id=self.project_id) + blob, ref = unit.new_ec2_credential( + user_id=self.user_id, project_id=self.project_id + ) r = self.post('/credentials', body={'credential': ref}, token=token_id) # We expect the response blob to contain the app_cred_id @@ -711,8 +764,9 @@ class TestCredentialAppCreds(CredentialBaseTestCase): # Assert credential id is same as hash of access key id for # ec2 credentials access = blob['access'].encode('utf-8') - self.assertEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) + self.assertEqual( + hashlib.sha256(access).hexdigest(), r.result['credential']['id'] + ) # Create a role assignment to ensure that it is ignored and only the # roles in the app cred are used @@ -720,11 +774,13 @@ class TestCredentialAppCreds(CredentialBaseTestCase): role_id = role['id'] PROVIDERS.role_api.create_role(role_id, role) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_id, self.project_id, role_id) + self.user_id, self.project_id, role_id + ) ret_blob = json.loads(r.result['credential']['blob']) ec2token = self._test_get_token( - access=ret_blob['access'], secret=ret_blob['secret']) + access=ret_blob['access'], secret=ret_blob['secret'] + ) ec2_roles = [role['id'] for role in ec2token['roles']] self.assertIn(self.role_id, ec2_roles) self.assertNotIn(role_id, ec2_roles) 
@@ -735,7 +791,8 @@ class TestCredentialAppCreds(CredentialBaseTestCase): '/credentials', body={'credential': ref}, token=token_id, - expected_status=http.client.CONFLICT) + expected_status=http.client.CONFLICT, + ) class TestCredentialAccessToken(CredentialBaseTestCase): @@ -747,7 +804,7 @@ class TestCredentialAccessToken(CredentialBaseTestCase): ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_fernet.MAX_ACTIVE_KEYS + credential_fernet.MAX_ACTIVE_KEYS, ) ) self.base_url = 'http://localhost/v3' @@ -765,42 +822,50 @@ class TestCredentialAccessToken(CredentialBaseTestCase): def _create_request_token(self, consumer, project_id, base_url=None): endpoint = '/OS-OAUTH1/request_token' - client = oauth1.Client(consumer['key'], - client_secret=consumer['secret'], - signature_method=oauth1.SIG_HMAC, - callback_uri="oob") + client = oauth1.Client( + consumer['key'], + client_secret=consumer['secret'], + signature_method=oauth1.SIG_HMAC, + callback_uri="oob", + ) headers = {'requested_project_id': project_id} if not base_url: base_url = self.base_url - url, headers, body = client.sign(base_url + endpoint, - http_method='POST', - headers=headers) + url, headers, body = client.sign( + base_url + endpoint, http_method='POST', headers=headers + ) return endpoint, headers def _create_access_token(self, consumer, token, base_url=None): endpoint = '/OS-OAUTH1/access_token' - client = oauth1.Client(consumer['key'], - client_secret=consumer['secret'], - resource_owner_key=token.key, - resource_owner_secret=token.secret, - signature_method=oauth1.SIG_HMAC, - verifier=token.verifier) + client = oauth1.Client( + consumer['key'], + client_secret=consumer['secret'], + resource_owner_key=token.key, + resource_owner_secret=token.secret, + signature_method=oauth1.SIG_HMAC, + verifier=token.verifier, + ) if not base_url: base_url = self.base_url - url, headers, body = client.sign(base_url + endpoint, - http_method='POST') + url, headers, body = client.sign( + base_url 
+ endpoint, http_method='POST' + ) headers.update({'Content-Type': 'application/json'}) return endpoint, headers def _get_oauth_token(self, consumer, token): - client = oauth1.Client(consumer['key'], - client_secret=consumer['secret'], - resource_owner_key=token.key, - resource_owner_secret=token.secret, - signature_method=oauth1.SIG_HMAC) + client = oauth1.Client( + consumer['key'], + client_secret=consumer['secret'], + resource_owner_key=token.key, + resource_owner_secret=token.secret, + signature_method=oauth1.SIG_HMAC, + ) endpoint = '/auth/tokens' - url, headers, body = client.sign(self.base_url + endpoint, - http_method='POST') + url, headers, body = client.sign( + self.base_url + endpoint, http_method='POST' + ) headers.update({'Content-Type': 'application/json'}) ref = {'auth': {'identity': {'oauth1': {}, 'methods': ['oauth1']}}} return endpoint, headers, ref @@ -818,8 +883,10 @@ class TestCredentialAccessToken(CredentialBaseTestCase): url, headers = self._create_request_token(consumer, self.project_id) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = self._urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] request_secret = credentials['oauth_token_secret'][0] @@ -833,8 +900,10 @@ class TestCredentialAccessToken(CredentialBaseTestCase): request_token.set_verifier(verifier) url, headers = self._create_access_token(consumer, request_token) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = self._urllib_parse_qs_text_keys(content.result) access_key = credentials['oauth_token'][0] access_secret = credentials['oauth_token_secret'][0] @@ -852,8 +921,9 @@ class TestCredentialAccessToken(CredentialBaseTestCase): 
access_key, token_id = self._get_access_token() # Create the credential with the access token - blob, ref = unit.new_ec2_credential(user_id=self.user_id, - project_id=self.project_id) + blob, ref = unit.new_ec2_credential( + user_id=self.user_id, project_id=self.project_id + ) r = self.post('/credentials', body={'credential': ref}, token=token_id) # We expect the response blob to contain the access_token_id @@ -866,8 +936,9 @@ class TestCredentialAccessToken(CredentialBaseTestCase): # Assert credential id is same as hash of access key id for # ec2 credentials access = blob['access'].encode('utf-8') - self.assertEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) + self.assertEqual( + hashlib.sha256(access).hexdigest(), r.result['credential']['id'] + ) # Create a role assignment to ensure that it is ignored and only the # roles in the access token are used @@ -875,11 +946,13 @@ class TestCredentialAccessToken(CredentialBaseTestCase): role_id = role['id'] PROVIDERS.role_api.create_role(role_id, role) PROVIDERS.assignment_api.add_role_to_user_and_project( - self.user_id, self.project_id, role_id) + self.user_id, self.project_id, role_id + ) ret_blob = json.loads(r.result['credential']['blob']) ec2token = self._test_get_token( - access=ret_blob['access'], secret=ret_blob['secret']) + access=ret_blob['access'], secret=ret_blob['secret'] + ) ec2_roles = [role['id'] for role in ec2token['roles']] self.assertIn(self.role_id, ec2_roles) self.assertNotIn(role_id, ec2_roles) @@ -890,26 +963,30 @@ class TestCredentialEc2(CredentialBaseTestCase): def test_ec2_credential_signature_validate(self): """Test signature validation with a v3 ec2 credential.""" - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) + blob, ref = unit.new_ec2_credential( + user_id=self.user['id'], project_id=self.project_id + ) r = self.post('/credentials', body={'credential': ref}) self.assertValidCredentialResponse(r, ref) # Assert credential 
id is same as hash of access key id access = blob['access'].encode('utf-8') - self.assertEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) + self.assertEqual( + hashlib.sha256(access).hexdigest(), r.result['credential']['id'] + ) cred_blob = json.loads(r.result['credential']['blob']) self.assertEqual(blob, cred_blob) - self._test_get_token(access=cred_blob['access'], - secret=cred_blob['secret']) + self._test_get_token( + access=cred_blob['access'], secret=cred_blob['secret'] + ) def test_ec2_credential_signature_validate_legacy(self): """Test signature validation with a legacy v3 ec2 credential.""" cred_json, _ = self._create_dict_blob_credential() cred_blob = json.loads(cred_json) - self._test_get_token(access=cred_blob['access'], - secret=cred_blob['secret']) + self._test_get_token( + access=cred_blob['access'], secret=cred_blob['secret'] + ) def _get_ec2_cred_uri(self): return '/users/%s/credentials/OS-EC2' % self.user_id @@ -925,26 +1002,25 @@ class TestCredentialEc2(CredentialBaseTestCase): self.assertEqual(self.user_id, ec2_cred['user_id']) self.assertEqual(self.project_id, ec2_cred['tenant_id']) self.assertIsNone(ec2_cred['trust_id']) - self._test_get_token(access=ec2_cred['access'], - secret=ec2_cred['secret']) + self._test_get_token( + access=ec2_cred['access'], secret=ec2_cred['secret'] + ) uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']]) - self.assertThat(ec2_cred['links']['self'], - matchers.EndsWith(uri)) + self.assertThat(ec2_cred['links']['self'], matchers.EndsWith(uri)) def test_ec2_get_credential(self): ec2_cred = self._get_ec2_cred() uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']]) r = self.get(uri) self.assertDictEqual(ec2_cred, r.result['credential']) - self.assertThat(ec2_cred['links']['self'], - matchers.EndsWith(uri)) + self.assertThat(ec2_cred['links']['self'], matchers.EndsWith(uri)) def test_ec2_cannot_get_non_ec2_credential(self): access_key = uuid.uuid4().hex cred_id = 
utils.hash_access_key(access_key) non_ec2_cred = unit.new_credential_ref( - user_id=self.user_id, - project_id=self.project_id) + user_id=self.user_id, project_id=self.project_id + ) non_ec2_cred['id'] = cred_id PROVIDERS.credential_api.create_credential(cred_id, non_ec2_cred) uri = '/'.join([self._get_ec2_cred_uri(), access_key]) @@ -959,13 +1035,12 @@ class TestCredentialEc2(CredentialBaseTestCase): r = self.get(uri) cred_list = r.result['credentials'] self.assertEqual(1, len(cred_list)) - self.assertThat(r.result['links']['self'], - matchers.EndsWith(uri)) + self.assertThat(r.result['links']['self'], matchers.EndsWith(uri)) # non-EC2 credentials won't be fetched non_ec2_cred = unit.new_credential_ref( - user_id=self.user_id, - project_id=self.project_id) + user_id=self.user_id, project_id=self.project_id + ) non_ec2_cred['type'] = uuid.uuid4().hex PROVIDERS.credential_api.create_credential( non_ec2_cred['id'], non_ec2_cred @@ -981,10 +1056,14 @@ class TestCredentialEc2(CredentialBaseTestCase): ec2_cred = self._get_ec2_cred() uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']]) cred_from_credential_api = ( - PROVIDERS.credential_api - .list_credentials_for_user(self.user_id, type=CRED_TYPE_EC2)) + PROVIDERS.credential_api.list_credentials_for_user( + self.user_id, type=CRED_TYPE_EC2 + ) + ) self.assertEqual(1, len(cred_from_credential_api)) self.delete(uri) - self.assertRaises(exception.CredentialNotFound, - PROVIDERS.credential_api.get_credential, - cred_from_credential_api[0]['id']) + self.assertRaises( + exception.CredentialNotFound, + PROVIDERS.credential_api.get_credential, + cred_from_credential_api[0]['id'], + ) diff --git a/keystone/tests/unit/test_v3_domain_config.py b/keystone/tests/unit/test_v3_domain_config.py index 886dc18e11..8ac9c2b613 100644 --- a/keystone/tests/unit/test_v3_domain_config.py +++ b/keystone/tests/unit/test_v3_domain_config.py @@ -34,16 +34,24 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain = 
unit.new_domain_ref() PROVIDERS.resource_api.create_domain(self.domain['id'], self.domain) - self.config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + self.config = { + 'ldap': { + 'url': uuid.uuid4().hex, + 'user_tree_dn': uuid.uuid4().hex, + }, + 'identity': {'driver': uuid.uuid4().hex}, + } def test_create_config(self): """Call ``PUT /domains/{domain_id}/config``.""" url = '/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']} - r = self.put(url, body={'config': self.config}, - expected_status=http.client.CREATED) + 'domain_id': self.domain['id'] + } + r = self.put( + url, + body={'config': self.config}, + expected_status=http.client.CREATED, + ) res = PROVIDERS.domain_config_api.get_config(self.domain['id']) self.assertEqual(self.config, r.result['config']) self.assertEqual(self.config, res) @@ -57,31 +65,39 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): """ invalid_domain_id = uuid.uuid4().hex url = '/domains/%(domain_id)s/config' % { - 'domain_id': invalid_domain_id} - self.put(url, body={'config': self.config}, - expected_status=exception.DomainNotFound.code) + 'domain_id': invalid_domain_id + } + self.put( + url, + body={'config': self.config}, + expected_status=exception.DomainNotFound.code, + ) def test_create_config_twice(self): """Check multiple creates don't throw error.""" - self.put('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, + self.put( + '/domains/%(domain_id)s/config' % {'domain_id': self.domain['id']}, body={'config': self.config}, - expected_status=http.client.CREATED) - self.put('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, + expected_status=http.client.CREATED, + ) + self.put( + '/domains/%(domain_id)s/config' % {'domain_id': self.domain['id']}, body={'config': self.config}, - expected_status=http.client.OK) + expected_status=http.client.OK, + ) def test_delete_config(self): """Call ``DELETE 
/domains{domain_id}/config``.""" PROVIDERS.domain_config_api.create_config( self.domain['id'], self.config ) - self.delete('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}) - self.get('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, - expected_status=exception.DomainConfigNotFound.code) + self.delete( + '/domains/%(domain_id)s/config' % {'domain_id': self.domain['id']} + ) + self.get( + '/domains/%(domain_id)s/config' % {'domain_id': self.domain['id']}, + expected_status=exception.DomainConfigNotFound.code, + ) def test_delete_config_invalid_domain(self): """Call ``DELETE /domains{domain_id}/config``. @@ -94,17 +110,20 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) invalid_domain_id = uuid.uuid4().hex - self.delete('/domains/%(domain_id)s/config' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) + self.delete( + '/domains/%(domain_id)s/config' % {'domain_id': invalid_domain_id}, + expected_status=exception.DomainNotFound.code, + ) def test_delete_config_by_group(self): """Call ``DELETE /domains{domain_id}/config/{group}``.""" PROVIDERS.domain_config_api.create_config( self.domain['id'], self.config ) - self.delete('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': self.domain['id']}) + self.delete( + '/domains/%(domain_id)s/config/ldap' + % {'domain_id': self.domain['id']} + ) res = PROVIDERS.domain_config_api.get_config(self.domain['id']) self.assertNotIn('ldap', res) @@ -119,9 +138,11 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) invalid_domain_id = uuid.uuid4().hex - self.delete('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) + self.delete( + '/domains/%(domain_id)s/config/ldap' + % {'domain_id': invalid_domain_id}, + expected_status=exception.DomainNotFound.code, + ) def test_get_head_config(self): """Call ``GET & 
HEAD for /domains{domain_id}/config``.""" @@ -129,7 +150,8 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) url = '/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']} + 'domain_id': self.domain['id'] + } r = self.get(url) self.assertEqual(self.config, r.result['config']) self.head(url, expected_status=http.client.OK) @@ -140,7 +162,8 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) url = '/domains/%(domain_id)s/config/ldap' % { - 'domain_id': self.domain['id']} + 'domain_id': self.domain['id'] + } r = self.get(url) self.assertEqual({'ldap': self.config['ldap']}, r.result['config']) self.head(url, expected_status=http.client.OK) @@ -156,9 +179,9 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) invalid_domain_id = uuid.uuid4().hex - url = ('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': invalid_domain_id} - ) + url = '/domains/%(domain_id)s/config/ldap' % { + 'domain_id': invalid_domain_id + } self.get(url, expected_status=exception.DomainNotFound.code) self.head(url, expected_status=exception.DomainNotFound.code) @@ -168,10 +191,12 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) url = '/domains/%(domain_id)s/config/ldap/url' % { - 'domain_id': self.domain['id']} + 'domain_id': self.domain['id'] + } r = self.get(url) - self.assertEqual({'url': self.config['ldap']['url']}, - r.result['config']) + self.assertEqual( + {'url': self.config['ldap']['url']}, r.result['config'] + ) self.head(url, expected_status=http.client.OK) def test_get_head_config_by_option_invalid_domain(self): @@ -185,17 +210,17 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) invalid_domain_id = uuid.uuid4().hex - url = ('/domains/%(domain_id)s/config/ldap/url' % { - 'domain_id': invalid_domain_id} - ) + url = '/domains/%(domain_id)s/config/ldap/url' % { + 'domain_id': 
invalid_domain_id + } self.get(url, expected_status=exception.DomainNotFound.code) self.head(url, expected_status=exception.DomainNotFound.code) def test_get_head_non_existant_config(self): """Call ``GET /domains{domain_id}/config when no config defined``.""" - url = ('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']} - ) + url = '/domains/%(domain_id)s/config' % { + 'domain_id': self.domain['id'] + } self.get(url, expected_status=http.client.NOT_FOUND) self.head(url, expected_status=http.client.NOT_FOUND) @@ -207,9 +232,9 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): response 404 domain not found. """ invalid_domain_id = uuid.uuid4().hex - url = ('/domains/%(domain_id)s/config' % { - 'domain_id': invalid_domain_id} - ) + url = '/domains/%(domain_id)s/config' % { + 'domain_id': invalid_domain_id + } self.get(url, expected_status=exception.DomainNotFound.code) self.head(url, expected_status=exception.DomainNotFound.code) @@ -217,9 +242,9 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): """Call ``GET /domains/{domain_id}/config/{group_not_exist}``.""" config = {'ldap': {'url': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config(self.domain['id'], config) - url = ('/domains/%(domain_id)s/config/identity' % { - 'domain_id': self.domain['id']} - ) + url = '/domains/%(domain_id)s/config/identity' % { + 'domain_id': self.domain['id'] + } self.get(url, expected_status=http.client.NOT_FOUND) self.head(url, expected_status=http.client.NOT_FOUND) @@ -233,9 +258,9 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): config = {'ldap': {'url': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config(self.domain['id'], config) invalid_domain_id = uuid.uuid4().hex - url = ('/domains/%(domain_id)s/config/identity' % { - 'domain_id': invalid_domain_id} - ) + url = '/domains/%(domain_id)s/config/identity' % { + 'domain_id': invalid_domain_id + } self.get(url, expected_status=exception.DomainNotFound.code) self.head(url, 
expected_status=exception.DomainNotFound.code) @@ -248,9 +273,9 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): """ config = {'ldap': {'url': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config(self.domain['id'], config) - url = ('/domains/%(domain_id)s/config/ldap/user_tree_dn' % { - 'domain_id': self.domain['id']} - ) + url = '/domains/%(domain_id)s/config/ldap/user_tree_dn' % { + 'domain_id': self.domain['id'] + } self.get(url, expected_status=http.client.NOT_FOUND) self.head(url, expected_status=http.client.NOT_FOUND) @@ -266,9 +291,9 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): config = {'ldap': {'url': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config(self.domain['id'], config) invalid_domain_id = uuid.uuid4().hex - url = ('/domains/%(domain_id)s/config/ldap/user_tree_dn' % { - 'domain_id': invalid_domain_id} - ) + url = '/domains/%(domain_id)s/config/ldap/user_tree_dn' % { + 'domain_id': invalid_domain_id + } self.get(url, expected_status=exception.DomainNotFound.code) self.head(url, expected_status=exception.DomainNotFound.code) @@ -277,16 +302,20 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): PROVIDERS.domain_config_api.create_config( self.domain['id'], self.config ) - new_config = {'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - r = self.patch('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, - body={'config': new_config}) + new_config = { + 'ldap': {'url': uuid.uuid4().hex}, + 'identity': {'driver': uuid.uuid4().hex}, + } + r = self.patch( + '/domains/%(domain_id)s/config' % {'domain_id': self.domain['id']}, + body={'config': new_config}, + ) res = PROVIDERS.domain_config_api.get_config(self.domain['id']) expected_config = copy.deepcopy(self.config) expected_config['ldap']['url'] = new_config['ldap']['url'] - expected_config['identity']['driver'] = ( - new_config['identity']['driver']) + expected_config['identity']['driver'] = new_config['identity'][ + 
'driver' + ] self.assertEqual(expected_config, r.result['config']) self.assertEqual(expected_config, res) @@ -300,29 +329,36 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): PROVIDERS.domain_config_api.create_config( self.domain['id'], self.config ) - new_config = {'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} + new_config = { + 'ldap': {'url': uuid.uuid4().hex}, + 'identity': {'driver': uuid.uuid4().hex}, + } invalid_domain_id = uuid.uuid4().hex - self.patch('/domains/%(domain_id)s/config' % { - 'domain_id': invalid_domain_id}, + self.patch( + '/domains/%(domain_id)s/config' % {'domain_id': invalid_domain_id}, body={'config': new_config}, - expected_status=exception.DomainNotFound.code) + expected_status=exception.DomainNotFound.code, + ) def test_update_config_group(self): """Call ``PATCH /domains/{domain_id}/config/{group}``.""" PROVIDERS.domain_config_api.create_config( self.domain['id'], self.config ) - new_config = {'ldap': {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} - r = self.patch('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': self.domain['id']}, - body={'config': new_config}) + new_config = { + 'ldap': {'url': uuid.uuid4().hex, 'user_filter': uuid.uuid4().hex} + } + r = self.patch( + '/domains/%(domain_id)s/config/ldap' + % {'domain_id': self.domain['id']}, + body={'config': new_config}, + ) res = PROVIDERS.domain_config_api.get_config(self.domain['id']) expected_config = copy.deepcopy(self.config) expected_config['ldap']['url'] = new_config['ldap']['url'] - expected_config['ldap']['user_filter'] = ( - new_config['ldap']['user_filter']) + expected_config['ldap']['user_filter'] = new_config['ldap'][ + 'user_filter' + ] self.assertEqual(expected_config, r.result['config']) self.assertEqual(expected_config, res) @@ -336,13 +372,16 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): PROVIDERS.domain_config_api.create_config( self.domain['id'], self.config ) - new_config = {'ldap': {'url': 
uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} + new_config = { + 'ldap': {'url': uuid.uuid4().hex, 'user_filter': uuid.uuid4().hex} + } invalid_domain_id = uuid.uuid4().hex - self.patch('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': invalid_domain_id}, + self.patch( + '/domains/%(domain_id)s/config/ldap' + % {'domain_id': invalid_domain_id}, body={'config': new_config}, - expected_status=exception.DomainNotFound.code) + expected_status=exception.DomainNotFound.code, + ) def test_update_config_invalid_group(self): """Call ``PATCH /domains/{domain_id}/config/{invalid_group}``.""" @@ -353,21 +392,29 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): # Trying to update a group that is neither whitelisted or sensitive # should result in Forbidden. invalid_group = uuid.uuid4().hex - new_config = {invalid_group: {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} - self.patch('/domains/%(domain_id)s/config/%(invalid_group)s' % { - 'domain_id': self.domain['id'], 'invalid_group': invalid_group}, + new_config = { + invalid_group: { + 'url': uuid.uuid4().hex, + 'user_filter': uuid.uuid4().hex, + } + } + self.patch( + '/domains/%(domain_id)s/config/%(invalid_group)s' + % {'domain_id': self.domain['id'], 'invalid_group': invalid_group}, body={'config': new_config}, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) # Trying to update a valid group, but one that is not in the current # config should result in NotFound config = {'ldap': {'suffix': uuid.uuid4().hex}} PROVIDERS.domain_config_api.create_config(self.domain['id'], config) new_config = {'identity': {'driver': uuid.uuid4().hex}} - self.patch('/domains/%(domain_id)s/config/identity' % { - 'domain_id': self.domain['id']}, + self.patch( + '/domains/%(domain_id)s/config/identity' + % {'domain_id': self.domain['id']}, body={'config': new_config}, - expected_status=http.client.NOT_FOUND) + expected_status=http.client.NOT_FOUND, + ) def 
test_update_config_invalid_group_invalid_domain(self): """Call ``PATCH /domains/{domain_id}/config/{invalid_group}``. @@ -380,14 +427,19 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) invalid_group = uuid.uuid4().hex - new_config = {invalid_group: {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} + new_config = { + invalid_group: { + 'url': uuid.uuid4().hex, + 'user_filter': uuid.uuid4().hex, + } + } invalid_domain_id = uuid.uuid4().hex - self.patch('/domains/%(domain_id)s/config/%(invalid_group)s' % { - 'domain_id': invalid_domain_id, - 'invalid_group': invalid_group}, + self.patch( + '/domains/%(domain_id)s/config/%(invalid_group)s' + % {'domain_id': invalid_domain_id, 'invalid_group': invalid_group}, body={'config': new_config}, - expected_status=exception.DomainNotFound.code) + expected_status=exception.DomainNotFound.code, + ) def test_update_config_option(self): """Call ``PATCH /domains/{domain_id}/config/{group}/{option}``.""" @@ -395,9 +447,11 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): self.domain['id'], self.config ) new_config = {'url': uuid.uuid4().hex} - r = self.patch('/domains/%(domain_id)s/config/ldap/url' % { - 'domain_id': self.domain['id']}, - body={'config': new_config}) + r = self.patch( + '/domains/%(domain_id)s/config/ldap/url' + % {'domain_id': self.domain['id']}, + body={'config': new_config}, + ) res = PROVIDERS.domain_config_api.get_config(self.domain['id']) expected_config = copy.deepcopy(self.config) expected_config['ldap']['url'] = new_config['url'] @@ -416,10 +470,12 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): ) new_config = {'url': uuid.uuid4().hex} invalid_domain_id = uuid.uuid4().hex - self.patch('/domains/%(domain_id)s/config/ldap/url' % { - 'domain_id': invalid_domain_id}, + self.patch( + '/domains/%(domain_id)s/config/ldap/url' + % {'domain_id': invalid_domain_id}, body={'config': new_config}, - expected_status=exception.DomainNotFound.code) + 
expected_status=exception.DomainNotFound.code, + ) def test_update_config_invalid_option(self): """Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}``.""" @@ -431,19 +487,23 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): # Trying to update an option that is neither whitelisted or sensitive # should result in Forbidden. self.patch( - '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' % { + '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' + % { 'domain_id': self.domain['id'], - 'invalid_option': invalid_option}, + 'invalid_option': invalid_option, + }, body={'config': new_config}, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) # Trying to update a valid option, but one that is not in the current # config should result in NotFound new_config = {'suffix': uuid.uuid4().hex} self.patch( - '/domains/%(domain_id)s/config/ldap/suffix' % { - 'domain_id': self.domain['id']}, + '/domains/%(domain_id)s/config/ldap/suffix' + % {'domain_id': self.domain['id']}, body={'config': new_config}, - expected_status=http.client.NOT_FOUND) + expected_status=http.client.NOT_FOUND, + ) def test_update_config_invalid_option_invalid_domain(self): """Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}``. 
@@ -459,11 +519,14 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): new_config = {'ldap': {invalid_option: uuid.uuid4().hex}} invalid_domain_id = uuid.uuid4().hex self.patch( - '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' % { + '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' + % { 'domain_id': invalid_domain_id, - 'invalid_option': invalid_option}, + 'invalid_option': invalid_option, + }, body={'config': new_config}, - expected_status=exception.DomainNotFound.code) + expected_status=exception.DomainNotFound.code, + ) def test_get_head_config_default(self): """Call ``GET & HEAD /domains/config/default``.""" @@ -477,8 +540,10 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): default_config = r.result['config'] for group in default_config: for option in default_config[group]: - self.assertEqual(getattr(getattr(CONF, group), option), - default_config[group][option]) + self.assertEqual( + getattr(getattr(CONF, group), option), + default_config[group][option], + ) self.head(url, expected_status=http.client.OK) def test_get_head_config_default_by_group(self): @@ -492,8 +557,9 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): r = self.get(url) default_config = r.result['config'] for option in default_config['ldap']: - self.assertEqual(getattr(CONF.ldap, option), - default_config['ldap'][option]) + self.assertEqual( + getattr(CONF.ldap, option), default_config['ldap'][option] + ) self.head(url, expected_status=http.client.OK) def test_get_head_config_default_by_option(self): @@ -512,10 +578,14 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): def test_get_head_config_default_by_invalid_group(self): """Call ``GET & HEAD for /domains/config/{bad-group}/default``.""" # First try a valid group, but one we don't support for domain config - self.get('/domains/config/resource/default', - expected_status=http.client.FORBIDDEN) - self.head('/domains/config/resource/default', - expected_status=http.client.FORBIDDEN) + self.get( + 
'/domains/config/resource/default', + expected_status=http.client.FORBIDDEN, + ) + self.head( + '/domains/config/resource/default', + expected_status=http.client.FORBIDDEN, + ) # Now try a totally invalid group url = '/domains/config/%s/default' % uuid.uuid4().hex @@ -527,10 +597,14 @@ class DomainConfigTestCase(test_v3.RestfulTestCase): # groups that the domain configuration API backlists explicitly. Doing # so would be a security vulnerability because it would leak sensitive # information over the API. - self.get('/domains/config/ldap/password/default', - expected_status=http.client.FORBIDDEN) - self.head('/domains/config/ldap/password/default', - expected_status=http.client.FORBIDDEN) + self.get( + '/domains/config/ldap/password/default', + expected_status=http.client.FORBIDDEN, + ) + self.head( + '/domains/config/ldap/password/default', + expected_status=http.client.FORBIDDEN, + ) def test_get_head_config_default_for_invalid_option(self): """Returning invalid configuration options is invalid.""" @@ -546,14 +620,12 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): # Create a user in the default domain self.non_admin_user = unit.create_user( - PROVIDERS.identity_api, - CONF.identity.default_domain_id + PROVIDERS.identity_api, CONF.identity.default_domain_id ) # Create an admin in the default domain self.admin_user = unit.create_user( - PROVIDERS.identity_api, - CONF.identity.default_domain_id + PROVIDERS.identity_api, CONF.identity.default_domain_id ) # Create a project in the default domain and a non-admin role @@ -563,30 +635,25 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): PROVIDERS.resource_api.create_project(self.project['id'], self.project) self.non_admin_role = unit.new_role_ref(name='not_admin') PROVIDERS.role_api.create_role( - self.non_admin_role['id'], - self.non_admin_role + self.non_admin_role['id'], self.non_admin_role ) # Give the non-admin user a role on the project 
PROVIDERS.assignment_api.add_role_to_user_and_project( - self.non_admin_user['id'], - self.project['id'], - self.role['id'] + self.non_admin_user['id'], self.project['id'], self.role['id'] ) # Give the user the admin role on the project, which is technically # `self.role` because RestfulTestCase sets that up for us. PROVIDERS.assignment_api.add_role_to_user_and_project( - self.admin_user['id'], - self.project['id'], - self.role_id + self.admin_user['id'], self.project['id'], self.role_id ) def _get_non_admin_token(self): non_admin_auth_data = self.build_authentication_request( user_id=self.non_admin_user['id'], password=self.non_admin_user['password'], - project_id=self.project['id'] + project_id=self.project['id'], ) return self.get_requested_token(non_admin_auth_data) @@ -594,7 +661,7 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): non_admin_auth_data = self.build_authentication_request( user_id=self.admin_user['id'], password=self.admin_user['password'], - project_id=self.project['id'] + project_id=self.project['id'], ) return self.get_requested_token(non_admin_auth_data) @@ -609,26 +676,22 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): password_regex = uuid.uuid4().hex password_regex_description = uuid.uuid4().hex self.config_fixture.config( - group='security_compliance', - password_regex=password_regex + group='security_compliance', password_regex=password_regex ) self.config_fixture.config( group='security_compliance', - password_regex_description=password_regex_description + password_regex_description=password_regex_description, ) expected_response = { 'security_compliance': { 'password_regex': password_regex, - 'password_regex_description': password_regex_description + 'password_regex_description': password_regex_description, } } - url = ( - '/domains/%(domain_id)s/config/%(group)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': 'security_compliance', - } - ) + url = 
'/domains/%(domain_id)s/config/%(group)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': 'security_compliance', + } # Make sure regular users and administrators can get security # requirement information. @@ -641,12 +704,10 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): self.head( url, token=self._get_non_admin_token(), - expected_status=http.client.OK + expected_status=http.client.OK, ) self.head( - url, - token=self._get_admin_token(), - expected_status=http.client.OK + url, token=self._get_admin_token(), expected_status=http.client.OK ) def test_get_security_compliance_config_for_non_default_domain_fails(self): @@ -664,44 +725,40 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): password_regex = uuid.uuid4().hex password_regex_description = uuid.uuid4().hex self.config_fixture.config( - group='security_compliance', - password_regex=password_regex + group='security_compliance', password_regex=password_regex ) self.config_fixture.config( group='security_compliance', - password_regex_description=password_regex_description - ) - url = ( - '/domains/%(domain_id)s/config/%(group)s' % - { - 'domain_id': domain['id'], - 'group': 'security_compliance', - } + password_regex_description=password_regex_description, ) + url = '/domains/%(domain_id)s/config/%(group)s' % { + 'domain_id': domain['id'], + 'group': 'security_compliance', + } # Make sure regular users and administrators are forbidden from doing # this. 
self.get( url, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.get( url, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) # Ensure HEAD requests behave the same way self.head( url, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.head( url, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_get_non_whitelisted_security_compliance_opt_fails(self): @@ -713,84 +770,72 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): """ # Set a security compliance configuration that isn't whitelisted self.config_fixture.config( - group='security_compliance', - lockout_failure_attempts=1 - ) - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': 'security_compliance', - 'option': 'lockout_failure_attempts' - } + group='security_compliance', lockout_failure_attempts=1 ) + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': 'security_compliance', + 'option': 'lockout_failure_attempts', + } # Make sure regular users and administrators are unable to ask for # sensitive information. 
self.get( url, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.get( url, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) # Ensure HEAD requests behave the same way self.head( url, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.head( url, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_get_security_compliance_password_regex(self): """Ask for the security compliance password regular expression.""" password_regex = uuid.uuid4().hex self.config_fixture.config( - group='security_compliance', - password_regex=password_regex + group='security_compliance', password_regex=password_regex ) group = 'security_compliance' option = 'password_regex' - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': group, - 'option': option - } - ) + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': group, + 'option': option, + } # Make sure regular users and administrators can ask for the # password regular expression. 
regular_response = self.get(url, token=self._get_non_admin_token()) self.assertEqual( - regular_response.result['config'][option], - password_regex + regular_response.result['config'][option], password_regex ) admin_response = self.get(url, token=self._get_admin_token()) self.assertEqual( - admin_response.result['config'][option], - password_regex + admin_response.result['config'][option], password_regex ) # Ensure HEAD requests behave the same way self.head( url, token=self._get_non_admin_token(), - expected_status=http.client.OK + expected_status=http.client.OK, ) self.head( - url, - token=self._get_admin_token(), - expected_status=http.client.OK + url, token=self._get_admin_token(), expected_status=http.client.OK ) def test_get_security_compliance_password_regex_description(self): @@ -798,56 +843,47 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): password_regex_description = uuid.uuid4().hex self.config_fixture.config( group='security_compliance', - password_regex_description=password_regex_description + password_regex_description=password_regex_description, ) group = 'security_compliance' option = 'password_regex_description' - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': group, - 'option': option - } - ) + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': group, + 'option': option, + } # Make sure regular users and administrators can ask for the # password regular expression. 
regular_response = self.get(url, token=self._get_non_admin_token()) self.assertEqual( regular_response.result['config'][option], - password_regex_description + password_regex_description, ) admin_response = self.get(url, token=self._get_admin_token()) self.assertEqual( - admin_response.result['config'][option], - password_regex_description + admin_response.result['config'][option], password_regex_description ) # Ensure HEAD requests behave the same way self.head( url, token=self._get_non_admin_token(), - expected_status=http.client.OK + expected_status=http.client.OK, ) self.head( - url, - token=self._get_admin_token(), - expected_status=http.client.OK + url, token=self._get_admin_token(), expected_status=http.client.OK ) def test_get_security_compliance_password_regex_returns_none(self): """When an option isn't set, we should explicitly return None.""" group = 'security_compliance' option = 'password_regex' - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': group, - 'option': option - } - ) + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': group, + 'option': option, + } # Make sure regular users and administrators can ask for the password # regular expression, but since it isn't set the returned value should @@ -861,26 +897,21 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): self.head( url, token=self._get_non_admin_token(), - expected_status=http.client.OK + expected_status=http.client.OK, ) self.head( - url, - token=self._get_admin_token(), - expected_status=http.client.OK + url, token=self._get_admin_token(), expected_status=http.client.OK ) def test_get_security_compliance_password_regex_desc_returns_none(self): """When an option isn't set, we should explicitly return None.""" group = 'security_compliance' option = 'password_regex_description' - url = ( - 
'/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': group, - 'option': option - } - ) + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': group, + 'option': option, + } # Make sure regular users and administrators can ask for the password # regular expression description, but since it isn't set the returned @@ -894,12 +925,10 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): self.head( url, token=self._get_non_admin_token(), - expected_status=http.client.OK + expected_status=http.client.OK, ) self.head( - url, - token=self._get_admin_token(), - expected_status=http.client.OK + url, token=self._get_admin_token(), expected_status=http.client.OK ) def test_get_security_compliance_config_with_user_from_other_domain(self): @@ -925,9 +954,7 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): # Give the new user a non-admin role on the project PROVIDERS.assignment_api.add_role_to_user_and_project( - user['id'], - project['id'], - self.non_admin_role['id'] + user['id'], project['id'], self.non_admin_role['id'] ) # Set our security compliance config values, we do this after we've @@ -936,13 +963,9 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): password_regex = uuid.uuid4().hex password_regex_description = uuid.uuid4().hex group = 'security_compliance' + self.config_fixture.config(group=group, password_regex=password_regex) self.config_fixture.config( - group=group, - password_regex=password_regex - ) - self.config_fixture.config( - group=group, - password_regex_description=password_regex_description + group=group, password_regex_description=password_regex_description ) # Get a token for the newly created user scoped to the project in the @@ -951,32 +974,24 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): user_token = self.build_authentication_request( user_id=user['id'], 
password=user['password'], - project_id=project['id'] + project_id=project['id'], ) user_token = self.get_requested_token(user_token) - url = ( - '/domains/%(domain_id)s/config/%(group)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': group, - } - ) + url = '/domains/%(domain_id)s/config/%(group)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': group, + } response = self.get(url, token=user_token) self.assertEqual( - response.result['config'][group]['password_regex'], - password_regex + response.result['config'][group]['password_regex'], password_regex ) self.assertEqual( response.result['config'][group]['password_regex_description'], - password_regex_description + password_regex_description, ) # Ensure HEAD requests behave the same way - self.head( - url, - token=user_token, - expected_status=http.client.OK - ) + self.head(url, token=user_token, expected_status=http.client.OK) def test_update_security_compliance_config_group_fails(self): """Make sure that updates to the entire security group section fail. @@ -988,16 +1003,13 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): new_config = { 'security_compliance': { 'password_regex': uuid.uuid4().hex, - 'password_regex_description': uuid.uuid4().hex + 'password_regex_description': uuid.uuid4().hex, } } - url = ( - '/domains/%(domain_id)s/config/%(group)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': 'security_compliance', - } - ) + url = '/domains/%(domain_id)s/config/%(group)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': 'security_compliance', + } # Make sure regular users and administrators aren't allowed to modify # security compliance configuration through the API. 
@@ -1005,32 +1017,25 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): url, body={'config': new_config}, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.patch( url, body={'config': new_config}, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_update_security_compliance_password_regex_fails(self): """Make sure any updates to security compliance options fail.""" group = 'security_compliance' option = 'password_regex' - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': group, - 'option': option - } - ) - new_config = { - group: { - option: uuid.uuid4().hex - } + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': group, + 'option': option, } + new_config = {group: {option: uuid.uuid4().hex}} # Make sure regular users and administrators aren't allowed to modify # security compliance configuration through the API. 
@@ -1038,32 +1043,25 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): url, body={'config': new_config}, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.patch( url, body={'config': new_config}, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_update_security_compliance_password_regex_description_fails(self): """Make sure any updates to security compliance options fail.""" group = 'security_compliance' option = 'password_regex_description' - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': group, - 'option': option - } - ) - new_config = { - group: { - option: uuid.uuid4().hex - } + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': group, + 'option': option, } + new_config = {group: {option: uuid.uuid4().hex}} # Make sure regular users and administrators aren't allowed to modify # security compliance configuration through the API. 
@@ -1071,13 +1069,13 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): url, body={'config': new_config}, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.patch( url, body={'config': new_config}, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_update_non_whitelisted_security_compliance_option_fails(self): @@ -1089,19 +1087,12 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): """ group = 'security_compliance' option = 'lockout_failure_attempts' - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': group, - 'option': option - } - ) - new_config = { - group: { - option: 1 - } + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': group, + 'option': option, } + new_config = {group: {option: 1}} # Make sure this behavior is not possible for regular users or # administrators. @@ -1109,106 +1100,94 @@ class SecurityRequirementsTestCase(test_v3.RestfulTestCase): url, body={'config': new_config}, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.patch( url, body={'config': new_config}, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_delete_security_compliance_group_fails(self): """The security compliance group shouldn't be deleteable.""" - url = ( - '/domains/%(domain_id)s/config/%(group)s/' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': 'security_compliance', - } - ) + url = '/domains/%(domain_id)s/config/%(group)s/' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': 'security_compliance', + } # Make sure regular users and administrators can't delete the security # compliance configuration group. 
self.delete( url, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.delete( url, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_delete_security_compliance_password_regex_fails(self): """The security compliance options shouldn't be deleteable.""" - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': 'security_compliance', - 'option': 'password_regex' - } - ) + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': 'security_compliance', + 'option': 'password_regex', + } # Make sure regular users and administrators can't delete the security # compliance configuration group. self.delete( url, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.delete( url, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_delete_security_compliance_password_regex_description_fails(self): """The security compliance options shouldn't be deleteable.""" - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': 'security_compliance', - 'option': 'password_regex_description' - } - ) + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': 'security_compliance', + 'option': 'password_regex_description', + } # Make sure regular users and administrators can't delete the security # compliance configuration group. 
self.delete( url, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.delete( url, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) def test_delete_non_whitelisted_security_compliance_options_fails(self): """The security compliance options shouldn't be deleteable.""" - url = ( - '/domains/%(domain_id)s/config/%(group)s/%(option)s' % - { - 'domain_id': CONF.identity.default_domain_id, - 'group': 'security_compliance', - 'option': 'lockout_failure_attempts' - } - ) + url = '/domains/%(domain_id)s/config/%(group)s/%(option)s' % { + 'domain_id': CONF.identity.default_domain_id, + 'group': 'security_compliance', + 'option': 'lockout_failure_attempts', + } # Make sure regular users and administrators can't delete the security # compliance configuration group. self.delete( url, expected_status=http.client.FORBIDDEN, - token=self._get_non_admin_token() + token=self._get_non_admin_token(), ) self.delete( url, expected_status=http.client.FORBIDDEN, - token=self._get_admin_token() + token=self._get_admin_token(), ) diff --git a/keystone/tests/unit/test_v3_endpoint_policy.py b/keystone/tests/unit/test_v3_endpoint_policy.py index a61fd5e2a2..48f9d8868b 100644 --- a/keystone/tests/unit/test_v3_endpoint_policy.py +++ b/keystone/tests/unit/test_v3_endpoint_policy.py @@ -39,9 +39,12 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase): PROVIDERS.policy_api.create_policy(self.policy['id'], self.policy) self.service = unit.new_service_ref() PROVIDERS.catalog_api.create_service(self.service['id'], self.service) - self.endpoint = unit.new_endpoint_ref(self.service['id'], enabled=True, - interface='public', - region_id=self.region_id) + self.endpoint = unit.new_endpoint_ref( + self.service['id'], + enabled=True, + interface='public', + region_id=self.region_id, + ) PROVIDERS.catalog_api.create_endpoint( self.endpoint['id'], self.endpoint ) @@ -58,71 +61,82 @@ 
class EndpointPolicyTestCase(test_v3.RestfulTestCase): # that there is not a false negative after creation. self.assert_head_and_get_return_same_response( - url, - expected_status=http.client.NOT_FOUND) + url, expected_status=http.client.NOT_FOUND + ) self.put(url) # test that the new resource is accessible. self.assert_head_and_get_return_same_response( - url, - expected_status=http.client.NO_CONTENT) + url, expected_status=http.client.NO_CONTENT + ) self.delete(url) # test that the deleted resource is no longer accessible self.assert_head_and_get_return_same_response( - url, - expected_status=http.client.NOT_FOUND) + url, expected_status=http.client.NOT_FOUND + ) def test_crud_for_policy_for_explicit_endpoint(self): """PUT, HEAD and DELETE for explicit endpoint policy.""" - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints/%(endpoint_id)s') % { - 'policy_id': self.policy['id'], - 'endpoint_id': self.endpoint['id']} + url = ( + '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + '/endpoints/%(endpoint_id)s' + ) % { + 'policy_id': self.policy['id'], + 'endpoint_id': self.endpoint['id'], + } self._crud_test(url) def test_crud_for_policy_for_service(self): """PUT, HEAD and DELETE for service endpoint policy.""" - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id']} + url = ( + '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + '/services/%(service_id)s' + ) % {'policy_id': self.policy['id'], 'service_id': self.service['id']} self._crud_test(url) def test_crud_for_policy_for_region_and_service(self): """PUT, HEAD and DELETE for region and service endpoint policy.""" - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s/regions/%(region_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id'], - 'region_id': self.region['id']} + url = ( + '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + 
'/services/%(service_id)s/regions/%(region_id)s' + ) % { + 'policy_id': self.policy['id'], + 'service_id': self.service['id'], + 'region_id': self.region['id'], + } self._crud_test(url) def test_get_policy_for_endpoint(self): """GET /endpoints/{endpoint_id}/policy.""" - self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints/%(endpoint_id)s' % { - 'policy_id': self.policy['id'], - 'endpoint_id': self.endpoint['id']}) + self.put( + '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + '/endpoints/%(endpoint_id)s' + % { + 'policy_id': self.policy['id'], + 'endpoint_id': self.endpoint['id'], + } + ) - self.head('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY' - '/policy' % { - 'endpoint_id': self.endpoint['id']}, - expected_status=http.client.OK) + self.head( + '/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY' + '/policy' % {'endpoint_id': self.endpoint['id']}, + expected_status=http.client.OK, + ) - r = self.get('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY' - '/policy' % { - 'endpoint_id': self.endpoint['id']}) + r = self.get( + '/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY' + '/policy' % {'endpoint_id': self.endpoint['id']} + ) self.assertValidPolicyResponse(r, ref=self.policy) def test_list_endpoints_for_policy(self): """GET & HEAD /policies/%(policy_id}/endpoints.""" - url = ( - '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints' % {'policy_id': self.policy['id']} - ) + url = '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' '/endpoints' % { + 'policy_id': self.policy['id'] + } self.put(url + '/' + self.endpoint['id']) r = self.get(url) self.assertValidEndpointListResponse(r, ref=self.endpoint) @@ -130,118 +144,140 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase): self.head(url, expected_status=http.client.OK) def test_endpoint_association_cleanup_when_endpoint_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints/%(endpoint_id)s') % { - 'policy_id': self.policy['id'], - 'endpoint_id': self.endpoint['id']} + url = ( + 
'/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + '/endpoints/%(endpoint_id)s' + ) % { + 'policy_id': self.policy['id'], + 'endpoint_id': self.endpoint['id'], + } self.put(url) self.head(url) - self.delete('/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint['id']}) + self.delete( + '/endpoints/%(endpoint_id)s' % {'endpoint_id': self.endpoint['id']} + ) self.head(url, expected_status=http.client.NOT_FOUND) def test_region_service_association_cleanup_when_region_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s/regions/%(region_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id'], - 'region_id': self.region['id']} + url = ( + '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + '/services/%(service_id)s/regions/%(region_id)s' + ) % { + 'policy_id': self.policy['id'], + 'service_id': self.service['id'], + 'region_id': self.region['id'], + } self.put(url) self.head(url) - self.delete('/regions/%(region_id)s' % { - 'region_id': self.region['id']}) + self.delete( + '/regions/%(region_id)s' % {'region_id': self.region['id']} + ) self.head(url, expected_status=http.client.NOT_FOUND) def test_region_service_association_cleanup_when_service_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s/regions/%(region_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id'], - 'region_id': self.region['id']} + url = ( + '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + '/services/%(service_id)s/regions/%(region_id)s' + ) % { + 'policy_id': self.policy['id'], + 'service_id': self.service['id'], + 'region_id': self.region['id'], + } self.put(url) self.head(url) - self.delete('/services/%(service_id)s' % { - 'service_id': self.service['id']}) + self.delete( + '/services/%(service_id)s' % {'service_id': self.service['id']} + ) self.head(url, expected_status=http.client.NOT_FOUND) def test_service_association_cleanup_when_service_deleted(self): - url 
= ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id']} + url = ( + '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + '/services/%(service_id)s' + ) % {'policy_id': self.policy['id'], 'service_id': self.service['id']} self.put(url) self.get(url, expected_status=http.client.NO_CONTENT) - self.delete('/policies/%(policy_id)s' % { - 'policy_id': self.policy['id']}) + self.delete( + '/policies/%(policy_id)s' % {'policy_id': self.policy['id']} + ) self.head(url, expected_status=http.client.NOT_FOUND) def test_service_association_cleanup_when_policy_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id']} + url = ( + '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' + '/services/%(service_id)s' + ) % {'policy_id': self.policy['id'], 'service_id': self.service['id']} self.put(url) self.get(url, expected_status=http.client.NO_CONTENT) - self.delete('/services/%(service_id)s' % { - 'service_id': self.service['id']}) + self.delete( + '/services/%(service_id)s' % {'service_id': self.service['id']} + ) self.head(url, expected_status=http.client.NOT_FOUND) class JsonHomeTests(test_v3.JsonHomeTestMixin): - EXTENSION_LOCATION = ('https://docs.openstack.org/api/openstack-identity/3' - '/ext/OS-ENDPOINT-POLICY/1.0/rel') - PARAM_LOCATION = ('https://docs.openstack.org/api/openstack-identity/3/' - 'param') + EXTENSION_LOCATION = ( + 'https://docs.openstack.org/api/openstack-identity/3' + '/ext/OS-ENDPOINT-POLICY/1.0/rel' + ) + PARAM_LOCATION = ( + 'https://docs.openstack.org/api/openstack-identity/3/' 'param' + ) JSON_HOME_DATA = { - EXTENSION_LOCATION + '/endpoint_policy': { + EXTENSION_LOCATION + + '/endpoint_policy': { 'href-template': '/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/' - 'policy', + 'policy', 'href-vars': { 'endpoint_id': PARAM_LOCATION + '/endpoint_id', }, }, - 
EXTENSION_LOCATION + '/policy_endpoints': { + EXTENSION_LOCATION + + '/policy_endpoints': { 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'endpoints', + 'endpoints', 'href-vars': { 'policy_id': PARAM_LOCATION + '/policy_id', }, }, - EXTENSION_LOCATION + '/endpoint_policy_association': { + EXTENSION_LOCATION + + '/endpoint_policy_association': { 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'endpoints/{endpoint_id}', + 'endpoints/{endpoint_id}', 'href-vars': { 'policy_id': PARAM_LOCATION + '/policy_id', 'endpoint_id': PARAM_LOCATION + '/endpoint_id', }, }, - EXTENSION_LOCATION + '/service_policy_association': { + EXTENSION_LOCATION + + '/service_policy_association': { 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}', + 'services/{service_id}', 'href-vars': { 'policy_id': PARAM_LOCATION + '/policy_id', 'service_id': PARAM_LOCATION + '/service_id', }, }, - EXTENSION_LOCATION + '/region_and_service_policy_association': { + EXTENSION_LOCATION + + '/region_and_service_policy_association': { 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}/regions/{region_id}', + 'services/{service_id}/regions/{region_id}', 'href-vars': { 'policy_id': PARAM_LOCATION + '/policy_id', 'service_id': PARAM_LOCATION + '/service_id', diff --git a/keystone/tests/unit/test_v3_federation.py b/keystone/tests/unit/test_v3_federation.py index 59c4c3dd1b..815b80892f 100644 --- a/keystone/tests/unit/test_v3_federation.py +++ b/keystone/tests/unit/test_v3_federation.py @@ -29,6 +29,7 @@ import saml2 from saml2 import saml from saml2 import sigver import urllib + xmldsig = importutils.try_import("saml2.xmldsig") if not xmldsig: xmldsig = importutils.try_import("xmldsig") @@ -76,10 +77,7 @@ class FederatedSetupMixin(object): UNSCOPED_V3_SAML2_REQ = { "identity": { "methods": [AUTH_METHOD], - AUTH_METHOD: { - "identity_provider": IDP, - "protocol": PROTOCOL - } + AUTH_METHOD: {"identity_provider": 
IDP, "protocol": PROTOCOL}, } } @@ -112,8 +110,14 @@ class FederatedSetupMixin(object): def _check_scoped_token_attributes(self, token): - for obj in ('user', 'catalog', 'expires_at', 'issued_at', - 'methods', 'roles'): + for obj in ( + 'user', + 'catalog', + 'expires_at', + 'issued_at', + 'methods', + 'roles', + ): self.assertIn(obj, token) os_federation = token['user']['OS-FEDERATION'] @@ -148,31 +152,31 @@ class FederatedSetupMixin(object): # Make sure user_name is url safe self.assertEqual(urllib.parse.quote(user['name']), user['name']) - def _issue_unscoped_token(self, - idp=None, - assertion='EMPLOYEE_ASSERTION', - environment=None): + def _issue_unscoped_token( + self, idp=None, assertion='EMPLOYEE_ASSERTION', environment=None + ): environment = environment or {} environment.update(getattr(mapping_fixtures, assertion)) with self.make_request(environ=environment): if idp is None: idp = self.IDP r = authentication.federated_authenticate_for_token( - protocol_id=self.PROTOCOL, identity_provider=idp) + protocol_id=self.PROTOCOL, identity_provider=idp + ) return r def idp_ref(self, id=None): idp = { 'id': id or uuid.uuid4().hex, 'enabled': True, - 'description': uuid.uuid4().hex + 'description': uuid.uuid4().hex, } return idp def proto_ref(self, mapping_id=None): proto = { 'id': uuid.uuid4().hex, - 'mapping_id': mapping_id or uuid.uuid4().hex + 'mapping_id': mapping_id or uuid.uuid4().hex, } return proto @@ -180,25 +184,17 @@ class FederatedSetupMixin(object): return { 'id': uuid.uuid4().hex, 'rules': rules or self.rules['rules'], - 'schema_version': "1.0" + 'schema_version': "1.0", } def _scope_request(self, unscoped_token_id, scope, scope_id): return { 'auth': { 'identity': { - 'methods': [ - 'token' - ], - 'token': { - 'id': unscoped_token_id - } + 'methods': ['token'], + 'token': {'id': unscoped_token_id}, }, - 'scope': { - scope: { - 'id': scope_id - } - } + 'scope': {scope: {'id': scope_id}}, } } @@ -210,57 +206,53 @@ class FederatedSetupMixin(object): 
"""Inject additional data.""" # Create and add domains self.domainA = unit.new_domain_ref() - PROVIDERS.resource_api.create_domain( - self.domainA['id'], self.domainA - ) + PROVIDERS.resource_api.create_domain(self.domainA['id'], self.domainA) self.domainB = unit.new_domain_ref() - PROVIDERS.resource_api.create_domain( - self.domainB['id'], self.domainB - ) + PROVIDERS.resource_api.create_domain(self.domainB['id'], self.domainB) self.domainC = unit.new_domain_ref() - PROVIDERS.resource_api.create_domain( - self.domainC['id'], self.domainC - ) + PROVIDERS.resource_api.create_domain(self.domainC['id'], self.domainC) self.domainD = unit.new_domain_ref() - PROVIDERS.resource_api.create_domain( - self.domainD['id'], self.domainD - ) + PROVIDERS.resource_api.create_domain(self.domainD['id'], self.domainD) # Create and add projects self.proj_employees = unit.new_project_ref( - domain_id=self.domainA['id']) + domain_id=self.domainA['id'] + ) PROVIDERS.resource_api.create_project( self.proj_employees['id'], self.proj_employees ) self.proj_customers = unit.new_project_ref( - domain_id=self.domainA['id']) + domain_id=self.domainA['id'] + ) PROVIDERS.resource_api.create_project( self.proj_customers['id'], self.proj_customers ) - self.project_all = unit.new_project_ref( - domain_id=self.domainA['id']) + self.project_all = unit.new_project_ref(domain_id=self.domainA['id']) PROVIDERS.resource_api.create_project( self.project_all['id'], self.project_all ) self.project_inherited = unit.new_project_ref( - domain_id=self.domainD['id']) + domain_id=self.domainD['id'] + ) PROVIDERS.resource_api.create_project( self.project_inherited['id'], self.project_inherited ) # Create and add groups self.group_employees = unit.new_group_ref(domain_id=self.domainA['id']) - self.group_employees = ( - PROVIDERS.identity_api.create_group(self.group_employees)) + self.group_employees = PROVIDERS.identity_api.create_group( + self.group_employees + ) self.group_customers = 
unit.new_group_ref(domain_id=self.domainA['id']) - self.group_customers = ( - PROVIDERS.identity_api.create_group(self.group_customers)) + self.group_customers = PROVIDERS.identity_api.create_group( + self.group_customers + ) self.group_admins = unit.new_group_ref(domain_id=self.domainA['id']) self.group_admins = PROVIDERS.identity_api.create_group( @@ -284,18 +276,21 @@ class FederatedSetupMixin(object): # * proj_employees # * project_all PROVIDERS.assignment_api.create_grant( - self.role_employee['id'], group_id=self.group_employees['id'], - project_id=self.proj_employees['id'] + self.role_employee['id'], + group_id=self.group_employees['id'], + project_id=self.proj_employees['id'], ) PROVIDERS.assignment_api.create_grant( - self.role_employee['id'], group_id=self.group_employees['id'], - project_id=self.project_all['id'] + self.role_employee['id'], + group_id=self.group_employees['id'], + project_id=self.project_all['id'], ) # Customers can access # * proj_customers PROVIDERS.assignment_api.create_grant( - self.role_customer['id'], group_id=self.group_customers['id'], - project_id=self.proj_customers['id'] + self.role_customer['id'], + group_id=self.group_customers['id'], + project_id=self.proj_customers['id'], ) # Admins can access: @@ -303,30 +298,36 @@ class FederatedSetupMixin(object): # * proj_employees # * project_all PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], group_id=self.group_admins['id'], - project_id=self.proj_customers['id'] + self.role_admin['id'], + group_id=self.group_admins['id'], + project_id=self.proj_customers['id'], ) PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], group_id=self.group_admins['id'], - project_id=self.proj_employees['id'] + self.role_admin['id'], + group_id=self.group_admins['id'], + project_id=self.proj_employees['id'], ) PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], group_id=self.group_admins['id'], - project_id=self.project_all['id'] + self.role_admin['id'], + 
group_id=self.group_admins['id'], + project_id=self.project_all['id'], ) # Customers can access: # * domain A PROVIDERS.assignment_api.create_grant( - self.role_customer['id'], group_id=self.group_customers['id'], - domain_id=self.domainA['id'] + self.role_customer['id'], + group_id=self.group_customers['id'], + domain_id=self.domainA['id'], ) # Customers can access projects via inheritance: # * domain D PROVIDERS.assignment_api.create_grant( - self.role_customer['id'], group_id=self.group_customers['id'], - domain_id=self.domainD['id'], inherited_to_projects=True + self.role_customer['id'], + group_id=self.group_customers['id'], + domain_id=self.domainD['id'], + inherited_to_projects=True, ) # Employees can access: @@ -334,12 +335,14 @@ class FederatedSetupMixin(object): # * domain B PROVIDERS.assignment_api.create_grant( - self.role_employee['id'], group_id=self.group_employees['id'], - domain_id=self.domainA['id'] + self.role_employee['id'], + group_id=self.group_employees['id'], + domain_id=self.domainA['id'], ) PROVIDERS.assignment_api.create_grant( - self.role_employee['id'], group_id=self.group_employees['id'], - domain_id=self.domainB['id'] + self.role_employee['id'], + group_id=self.group_employees['id'], + domain_id=self.domainB['id'], ) # Admins can access: @@ -347,166 +350,84 @@ class FederatedSetupMixin(object): # * domain B # * domain C PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], group_id=self.group_admins['id'], - domain_id=self.domainA['id'] + self.role_admin['id'], + group_id=self.group_admins['id'], + domain_id=self.domainA['id'], ) PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], group_id=self.group_admins['id'], - domain_id=self.domainB['id'] + self.role_admin['id'], + group_id=self.group_admins['id'], + domain_id=self.domainB['id'], ) PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], group_id=self.group_admins['id'], - domain_id=self.domainC['id'] + self.role_admin['id'], + 
group_id=self.group_admins['id'], + domain_id=self.domainC['id'], ) self.rules = { 'rules': [ { 'local': [ - { - 'group': { - 'id': self.group_employees['id'] - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } + {'group': {'id': self.group_employees['id']}}, + {'user': {'name': '{0}', 'id': '{1}'}}, ], 'remote': [ - { - 'type': 'UserName' - }, + {'type': 'UserName'}, { 'type': 'Email', }, - { - 'type': 'orgPersonType', - 'any_one_of': [ - 'Employee' - ] - } - ] + {'type': 'orgPersonType', 'any_one_of': ['Employee']}, + ], }, { 'local': [ - { - 'group': { - 'id': self.group_employees['id'] - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } + {'group': {'id': self.group_employees['id']}}, + {'user': {'name': '{0}', 'id': '{1}'}}, ], 'remote': [ - { - 'type': self.ASSERTION_PREFIX + 'UserName' - }, + {'type': self.ASSERTION_PREFIX + 'UserName'}, { 'type': self.ASSERTION_PREFIX + 'Email', }, { 'type': self.ASSERTION_PREFIX + 'orgPersonType', - 'any_one_of': [ - 'SuperEmployee' - ] - } - ] + 'any_one_of': ['SuperEmployee'], + }, + ], }, { 'local': [ - { - 'group': { - 'id': self.group_customers['id'] - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } + {'group': {'id': self.group_customers['id']}}, + {'user': {'name': '{0}', 'id': '{1}'}}, ], 'remote': [ - { - 'type': 'UserName' - }, - { - 'type': 'Email' - }, - { - 'type': 'orgPersonType', - 'any_one_of': [ - 'Customer' - ] - } - ] + {'type': 'UserName'}, + {'type': 'Email'}, + {'type': 'orgPersonType', 'any_one_of': ['Customer']}, + ], }, { 'local': [ - { - 'group': { - 'id': self.group_admins['id'] - } - }, - { - 'group': { - 'id': self.group_employees['id'] - } - }, - { - 'group': { - 'id': self.group_customers['id'] - } - }, - - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } + {'group': {'id': self.group_admins['id']}}, + {'group': {'id': self.group_employees['id']}}, + {'group': {'id': self.group_customers['id']}}, + {'user': {'name': '{0}', 'id': '{1}'}}, ], 'remote': 
[ - { - 'type': 'UserName' - }, - { - 'type': 'Email' - }, + {'type': 'UserName'}, + {'type': 'Email'}, { 'type': 'orgPersonType', - 'any_one_of': [ - 'Admin', - 'Chief' - ] - } - ] + 'any_one_of': ['Admin', 'Chief'], + }, + ], }, { 'local': [ - { - 'group': { - 'id': uuid.uuid4().hex - } - }, - { - 'group': { - 'id': self.group_customers['id'] - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } + {'group': {'id': uuid.uuid4().hex}}, + {'group': {'id': self.group_customers['id']}}, + {'user': {'name': '{0}', 'id': '{1}'}}, ], 'remote': [ { @@ -515,33 +436,14 @@ class FederatedSetupMixin(object): { 'type': 'Email', }, - { - 'type': 'FirstName', - 'any_one_of': [ - 'Jill' - ] - }, - { - 'type': 'LastName', - 'any_one_of': [ - 'Smith' - ] - } - ] + {'type': 'FirstName', 'any_one_of': ['Jill']}, + {'type': 'LastName', 'any_one_of': ['Smith']}, + ], }, { 'local': [ - { - 'group': { - 'id': 'this_group_no_longer_exists' - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } + {'group': {'id': 'this_group_no_longer_exists'}}, + {'user': {'name': '{0}', 'id': '{1}'}}, ], 'remote': [ { @@ -552,35 +454,21 @@ class FederatedSetupMixin(object): }, { 'type': 'Email', - 'any_one_of': [ - 'testacct@example.com' - ] + 'any_one_of': ['testacct@example.com'], }, - { - 'type': 'orgPersonType', - 'any_one_of': [ - 'Tester' - ] - } - ] + {'type': 'orgPersonType', 'any_one_of': ['Tester']}, + ], }, # rules with local group names { "local": [ - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - }, + {'user': {'name': '{0}', 'id': '{1}'}}, { "group": { "name": self.group_customers['name'], - "domain": { - "name": self.domainA['name'] - } + "domain": {"name": self.domainA['name']}, } - } + }, ], "remote": [ { @@ -591,29 +479,19 @@ class FederatedSetupMixin(object): }, { "type": "orgPersonType", - "any_one_of": [ - "CEO", - "CTO" - ], - } - ] + "any_one_of": ["CEO", "CTO"], + }, + ], }, { "local": [ - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - }, + {'user': 
{'name': '{0}', 'id': '{1}'}}, { "group": { "name": self.group_admins['name'], - "domain": { - "id": self.domainA['id'] - } + "domain": {"id": self.domainA['id']}, } - } + }, ], "remote": [ { @@ -622,30 +500,18 @@ class FederatedSetupMixin(object): { "type": "Email", }, - { - "type": "orgPersonType", - "any_one_of": [ - "Managers" - ] - } - ] + {"type": "orgPersonType", "any_one_of": ["Managers"]}, + ], }, { "local": [ - { - "user": { - "name": "{0}", - "id": "{1}" - } - }, + {"user": {"name": "{0}", "id": "{1}"}}, { "group": { "name": "NON_EXISTING", - "domain": { - "id": self.domainA['id'] - } + "domain": {"id": self.domainA['id']}, } - } + }, ], "remote": [ { @@ -654,13 +520,8 @@ class FederatedSetupMixin(object): { "type": "Email", }, - { - "type": "UserName", - "any_one_of": [ - "IamTester" - ] - } - ] + {"type": "UserName", "any_one_of": ["IamTester"]}, + ], }, { "local": [ @@ -668,25 +529,12 @@ class FederatedSetupMixin(object): "user": { "type": "local", "name": self.user['name'], - "domain": { - "id": self.user['domain_id'] - } + "domain": {"id": self.user['domain_id']}, } }, - { - "group": { - "id": self.group_customers['id'] - } - } + {"group": {"id": self.group_customers['id']}}, ], - "remote": [ - { - "type": "UserType", - "any_one_of": [ - "random" - ] - } - ] + "remote": [{"type": "UserType", "any_one_of": ["random"]}], }, { "local": [ @@ -694,31 +542,17 @@ class FederatedSetupMixin(object): "user": { "type": "local", "name": self.user['name'], - "domain": { - "id": uuid.uuid4().hex - } + "domain": {"id": uuid.uuid4().hex}, } } ], "remote": [ - { - "type": "Position", - "any_one_of": [ - "DirectorGeneral" - ] - } - ] + {"type": "Position", "any_one_of": ["DirectorGeneral"]} + ], }, # rules for users with no groups { - "local": [ - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } - ], + "local": [{'user': {'name': '{0}', 'id': '{1}'}}], "remote": [ { 'type': 'UserName', @@ -728,12 +562,10 @@ class FederatedSetupMixin(object): }, { 'type': 
'orgPersonType', - 'any_one_of': [ - 'NoGroupsOrg' - ] - } - ] - } + 'any_one_of': ['NoGroupsOrg'], + }, + ], + }, ] } @@ -744,9 +576,7 @@ class FederatedSetupMixin(object): ) # Add IDP self.idp = self.idp_ref(id=self.IDP) - PROVIDERS.federation_api.create_idp( - self.idp['id'], self.idp - ) + PROVIDERS.federation_api.create_idp(self.idp['id'], self.idp) # Add IDP with remote self.idp_with_remote = self.idp_ref(id=self.IDP_WITH_REMOTE) self.idp_with_remote['remote_ids'] = self.REMOTE_IDS @@ -776,62 +606,83 @@ class FederatedSetupMixin(object): with self.make_request(): self.tokens = {} - VARIANTS = ('EMPLOYEE_ASSERTION', 'CUSTOMER_ASSERTION', - 'ADMIN_ASSERTION') + VARIANTS = ( + 'EMPLOYEE_ASSERTION', + 'CUSTOMER_ASSERTION', + 'ADMIN_ASSERTION', + ) for variant in VARIANTS: self._inject_assertion(variant) r = authentication.authenticate_for_token( - self.UNSCOPED_V3_SAML2_REQ) + self.UNSCOPED_V3_SAML2_REQ + ) self.tokens[variant] = r.id self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN = ( self._scope_request( - uuid.uuid4().hex, 'project', self.proj_customers['id'])) + uuid.uuid4().hex, 'project', self.proj_customers['id'] + ) + ) self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE = ( self._scope_request( - self.tokens['EMPLOYEE_ASSERTION'], 'project', - self.proj_employees['id'])) + self.tokens['EMPLOYEE_ASSERTION'], + 'project', + self.proj_employees['id'], + ) + ) self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'project', - self.proj_employees['id']) + self.tokens['ADMIN_ASSERTION'], + 'project', + self.proj_employees['id'], + ) self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'project', - self.proj_customers['id']) + self.tokens['ADMIN_ASSERTION'], + 'project', + self.proj_customers['id'], + ) self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER = ( self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'project', - self.proj_employees['id'])) + 
self.tokens['CUSTOMER_ASSERTION'], + 'project', + self.proj_employees['id'], + ) + ) self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER = ( self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'project', - self.project_inherited['id'])) + self.tokens['CUSTOMER_ASSERTION'], + 'project', + self.project_inherited['id'], + ) + ) self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER = self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'domain', - self.domainA['id']) + self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainA['id'] + ) self.TOKEN_SCOPE_DOMAIN_B_FROM_CUSTOMER = self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'domain', - self.domainB['id']) + self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainB['id'] + ) self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER = self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'domain', - self.domainD['id']) + self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainD['id'] + ) self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'domain', self.domainA['id']) + self.tokens['ADMIN_ASSERTION'], 'domain', self.domainA['id'] + ) self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'domain', self.domainB['id']) + self.tokens['ADMIN_ASSERTION'], 'domain', self.domainB['id'] + ) self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'domain', - self.domainC['id']) + self.tokens['ADMIN_ASSERTION'], 'domain', self.domainC['id'] + ) class FederatedIdentityProviderTests(test_v3.RestfulTestCase): @@ -846,8 +697,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): return '/OS-FEDERATION/identity_providers/' + str(suffix) return '/OS-FEDERATION/identity_providers' - def _fetch_attribute_from_response(self, resp, parameter, - assert_is_not_none=True): + def _fetch_attribute_from_response( + self, resp, parameter, assert_is_not_none=True + ): """Fetch single attribute from TestResponse object.""" result 
= resp.result.get(parameter) if assert_is_not_none: @@ -857,8 +709,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): def _create_and_decapsulate_response(self, body=None): """Create IdP and fetch it's random id along with entity.""" default_resp = self._create_default_idp(body=body) - idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) self.assertIsNotNone(idp) idp_id = idp.get('id') return (idp_id, idp) @@ -869,14 +722,18 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): resp = self.get(url) return resp - def _create_default_idp(self, body=None, - expected_status=http.client.CREATED): + def _create_default_idp( + self, body=None, expected_status=http.client.CREATED + ): """Create default IdP.""" url = self.base_url(suffix=uuid.uuid4().hex) if body is None: body = self._http_idp_input() - resp = self.put(url, body={'identity_provider': body}, - expected_status=expected_status) + resp = self.put( + url, + body={'identity_provider': body}, + expected_status=expected_status, + ) return resp def _http_idp_input(self): @@ -885,8 +742,15 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body['description'] = uuid.uuid4().hex return body - def _assign_protocol_to_idp(self, idp_id=None, proto=None, url=None, - mapping_id=None, validate=True, **kwargs): + def _assign_protocol_to_idp( + self, + idp_id=None, + proto=None, + url=None, + mapping_id=None, + validate=True, + **kwargs + ): if url is None: url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s') if idp_id is None: @@ -900,10 +764,13 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): url = url % {'idp_id': idp_id, 'protocol_id': proto} resp = self.put(url, body={'protocol': body}, **kwargs) if validate: - self.assertValidResponse(resp, 'protocol', dummy_validator, - keys_to_check=['id', 'mapping_id'], - ref={'id': proto, - 
'mapping_id': mapping_id}) + self.assertValidResponse( + resp, + 'protocol', + dummy_validator, + keys_to_check=['id', 'mapping_id'], + ref={'id': proto, 'mapping_id': mapping_id}, + ) return (resp, idp_id, proto) def _get_protocol(self, idp_id, protocol_id): @@ -916,9 +783,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER mapping['id'] = mapping_id url = '/OS-FEDERATION/mappings/%s' % mapping_id - self.put(url, - body={'mapping': mapping}, - expected_status=http.client.CREATED) + self.put( + url, body={'mapping': mapping}, expected_status=http.client.CREATED + ) def assertIdpDomainCreated(self, idp_id, domain_id): domain = PROVIDERS.resource_api.get_domain(domain_id) @@ -931,9 +798,13 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body = self.default_body.copy() body['description'] = uuid.uuid4().hex resp = self._create_default_idp(body=body) - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) + self.assertValidResponse( + resp, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=body, + ) attr = self._fetch_attribute_from_response(resp, 'identity_provider') self.assertIdpDomainCreated(attr['id'], attr['domain_id']) @@ -946,9 +817,13 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): PROVIDERS.resource_api.create_domain(domain['id'], domain) body['domain_id'] = domain['id'] resp = self._create_default_idp(body=body) - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) + self.assertValidResponse( + resp, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=body, + ) def test_create_idp_domain_id_none(self): keys_to_check = list(self.idp_keys) @@ -956,9 +831,13 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body['description'] = uuid.uuid4().hex body['domain_id'] = None resp 
= self._create_default_idp(body=body) - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) + self.assertValidResponse( + resp, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=body, + ) attr = self._fetch_attribute_from_response(resp, 'identity_provider') self.assertIdpDomainCreated(attr['id'], attr['domain_id']) @@ -979,7 +858,7 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): resp = self.put( self.base_url(suffix=idp_id), body={'identity_provider': self.default_body.copy()}, - expected_status=http.client.CONFLICT + expected_status=http.client.CONFLICT, ) domains = PROVIDERS.resource_api.list_domains() self.assertEqual(number_of_domains, len(domains)) @@ -1006,7 +885,7 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): resp = self.put( self.base_url(suffix=idp_id), body={'identity_provider': body}, - expected_status=http.client.CONFLICT + expected_status=http.client.CONFLICT, ) # Make sure the domain specified in the second request was not deleted, @@ -1024,29 +903,43 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body['description'] = uuid.uuid4().hex body['domain_id'] = domain['id'] idp1 = self._create_default_idp(body=body) - self.assertValidResponse(idp1, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) + self.assertValidResponse( + idp1, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=body, + ) # create a 2nd idp with the same domain_id url = self.base_url(suffix=uuid.uuid4().hex) body = self.default_body.copy() body['description'] = uuid.uuid4().hex body['domain_id'] = domain['id'] - idp2 = self.put(url, body={'identity_provider': body}, - expected_status=http.client.CREATED) - self.assertValidResponse(idp2, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) + idp2 = self.put( + url, + body={'identity_provider': body}, + 
expected_status=http.client.CREATED, + ) + self.assertValidResponse( + idp2, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=body, + ) - self.assertEqual(idp1.result['identity_provider']['domain_id'], - idp2.result['identity_provider']['domain_id']) + self.assertEqual( + idp1.result['identity_provider']['domain_id'], + idp2.result['identity_provider']['domain_id'], + ) def test_cannot_update_idp_domain(self): # create new idp body = self.default_body.copy() default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') self.assertIsNotNone(idp_id) # create domain and try to update the idp's domain @@ -1061,8 +954,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body = self.default_body.copy() body['description'] = uuid.uuid4().hex body['domain_id'] = uuid.uuid4().hex - self._create_default_idp(body=body, - expected_status=http.client.NOT_FOUND) + self._create_default_idp( + body=body, expected_status=http.client.NOT_FOUND + ) def test_create_idp_remote(self): """Create the IdentityProvider entity associated to remote_ids.""" @@ -1070,13 +964,19 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): keys_to_check.append('remote_ids') body = self.default_body.copy() body['description'] = uuid.uuid4().hex - body['remote_ids'] = [uuid.uuid4().hex, - uuid.uuid4().hex, - uuid.uuid4().hex] + body['remote_ids'] = [ + uuid.uuid4().hex, + uuid.uuid4().hex, + uuid.uuid4().hex, + ] resp = self._create_default_idp(body=body) - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) + self.assertValidResponse( + resp, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=body, + ) attr = self._fetch_attribute_from_response(resp, 
'identity_provider') self.assertIdpDomainCreated(attr['id'], attr['domain_id']) @@ -1091,21 +991,26 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): """ body = self.default_body.copy() repeated_remote_id = uuid.uuid4().hex - body['remote_ids'] = [uuid.uuid4().hex, - uuid.uuid4().hex, - uuid.uuid4().hex, - repeated_remote_id] + body['remote_ids'] = [ + uuid.uuid4().hex, + uuid.uuid4().hex, + uuid.uuid4().hex, + repeated_remote_id, + ] self._create_default_idp(body=body) url = self.base_url(suffix=uuid.uuid4().hex) - body['remote_ids'] = [uuid.uuid4().hex, - repeated_remote_id] - resp = self.put(url, body={'identity_provider': body}, - expected_status=http.client.CONFLICT) + body['remote_ids'] = [uuid.uuid4().hex, repeated_remote_id] + resp = self.put( + url, + body={'identity_provider': body}, + expected_status=http.client.CONFLICT, + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Duplicate remote ID', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Duplicate remote ID', resp_data.get('error', {}).get('message') + ) def test_create_idp_remote_empty(self): """Create an IdP with empty remote_ids.""" @@ -1115,9 +1020,13 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body['description'] = uuid.uuid4().hex body['remote_ids'] = [] resp = self._create_default_idp(body=body) - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) + self.assertValidResponse( + resp, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=body, + ) def test_create_idp_remote_none(self): """Create an IdP with a None remote_ids.""" @@ -1129,9 +1038,13 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): resp = self._create_default_idp(body=body) expected = body.copy() expected['remote_ids'] = [] - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=expected) + self.assertValidResponse( 
+ resp, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=expected, + ) def test_create_idp_authorization_ttl(self): keys_to_check = list(self.idp_keys) @@ -1141,17 +1054,22 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body['authorization_ttl'] = 10080 resp = self._create_default_idp(body) expected = body.copy() - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=expected) + self.assertValidResponse( + resp, + 'identity_provider', + dummy_validator, + keys_to_check=keys_to_check, + ref=expected, + ) def test_update_idp_remote_ids(self): """Update IdP's remote_ids parameter.""" body = self.default_body.copy() body['remote_ids'] = [uuid.uuid4().hex] default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') url = self.base_url(suffix=idp_id) self.assertIsNotNone(idp_id) @@ -1160,25 +1078,30 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body = {'identity_provider': body} resp = self.patch(url, body=body) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') + updated_idp = self._fetch_attribute_from_response( + resp, 'identity_provider' + ) body = body['identity_provider'] - self.assertEqual(sorted(body['remote_ids']), - sorted(updated_idp.get('remote_ids'))) + self.assertEqual( + sorted(body['remote_ids']), sorted(updated_idp.get('remote_ids')) + ) resp = self.get(url) - returned_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - self.assertEqual(sorted(body['remote_ids']), - sorted(returned_idp.get('remote_ids'))) + returned_idp = self._fetch_attribute_from_response( + resp, 'identity_provider' + ) + self.assertEqual( + sorted(body['remote_ids']), sorted(returned_idp.get('remote_ids')) + ) def 
test_update_idp_clean_remote_ids(self): """Update IdP's remote_ids parameter with an empty list.""" body = self.default_body.copy() body['remote_ids'] = [uuid.uuid4().hex] default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') url = self.base_url(suffix=idp_id) self.assertIsNotNone(idp_id) @@ -1187,17 +1110,21 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body = {'identity_provider': body} resp = self.patch(url, body=body) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') + updated_idp = self._fetch_attribute_from_response( + resp, 'identity_provider' + ) body = body['identity_provider'] - self.assertEqual(sorted(body['remote_ids']), - sorted(updated_idp.get('remote_ids'))) + self.assertEqual( + sorted(body['remote_ids']), sorted(updated_idp.get('remote_ids')) + ) resp = self.get(url) - returned_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - self.assertEqual(sorted(body['remote_ids']), - sorted(returned_idp.get('remote_ids'))) + returned_idp = self._fetch_attribute_from_response( + resp, 'identity_provider' + ) + self.assertEqual( + sorted(body['remote_ids']), sorted(returned_idp.get('remote_ids')) + ) def test_update_idp_remote_repeated(self): """Update an IdentityProvider entity reusing a remote_id. 
@@ -1217,24 +1144,28 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): # Create second identity provider (without remote_ids) body = self.default_body.copy() default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') url = self.base_url(suffix=idp_id) body['remote_ids'] = [repeated_remote_id] - resp = self.patch(url, body={'identity_provider': body}, - expected_status=http.client.CONFLICT) + resp = self.patch( + url, + body={'identity_provider': body}, + expected_status=http.client.CONFLICT, + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Duplicate remote ID', - resp_data['error']['message']) + self.assertIn('Duplicate remote ID', resp_data['error']['message']) def test_update_idp_authorization_ttl(self): body = self.default_body.copy() body['authorization_ttl'] = 10080 default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') url = self.base_url(suffix=idp_id) self.assertIsNotNone(idp_id) @@ -1243,17 +1174,21 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body = {'identity_provider': body} resp = self.patch(url, body=body) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') + updated_idp = self._fetch_attribute_from_response( + resp, 'identity_provider' + ) body = body['identity_provider'] - self.assertEqual(body['authorization_ttl'], - updated_idp.get('authorization_ttl')) + self.assertEqual( + body['authorization_ttl'], updated_idp.get('authorization_ttl') + ) resp = self.get(url) - returned_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - 
self.assertEqual(body['authorization_ttl'], - returned_idp.get('authorization_ttl')) + returned_idp = self._fetch_attribute_from_response( + resp, 'identity_provider' + ) + self.assertEqual( + body['authorization_ttl'], returned_idp.get('authorization_ttl') + ) def test_list_head_idps(self, iterations=5): """List all available IdentityProviders. @@ -1264,9 +1199,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): because other tests also create IdPs. """ + def get_id(resp): - r = self._fetch_attribute_from_response(resp, - 'identity_provider') + r = self._fetch_attribute_from_response(resp, 'identity_provider') return r.get('id') ids = [] @@ -1279,11 +1214,15 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): keys_to_check.append('domain_id') url = self.base_url() resp = self.get(url) - self.assertValidListResponse(resp, 'identity_providers', - dummy_validator, - keys_to_check=keys_to_check) - entities = self._fetch_attribute_from_response(resp, - 'identity_providers') + self.assertValidListResponse( + resp, + 'identity_providers', + dummy_validator, + keys_to_check=keys_to_check, + ) + entities = self._fetch_attribute_from_response( + resp, 'identity_providers' + ) entities_ids = set([e['id'] for e in entities]) ids_intersection = entities_ids.intersection(ids) self.assertEqual(ids_intersection, ids) @@ -1292,8 +1231,7 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): def test_filter_list_head_idp_by_id(self): def get_id(resp): - r = self._fetch_attribute_from_response(resp, - 'identity_provider') + r = self._fetch_attribute_from_response(resp, 'identity_provider') return r.get('id') idp1_id = get_id(self._create_default_idp()) @@ -1302,8 +1240,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): # list the IdP, should get two IdP. 
url = self.base_url() resp = self.get(url) - entities = self._fetch_attribute_from_response(resp, - 'identity_providers') + entities = self._fetch_attribute_from_response( + resp, 'identity_providers' + ) entities_ids = [e['id'] for e in entities] self.assertCountEqual(entities_ids, [idp1_id, idp2_id]) @@ -1318,8 +1257,7 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): def test_filter_list_head_idp_by_enabled(self): def get_id(resp): - r = self._fetch_attribute_from_response(resp, - 'identity_provider') + r = self._fetch_attribute_from_response(resp, 'identity_provider') return r.get('id') idp1_id = get_id(self._create_default_idp()) @@ -1331,8 +1269,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): # list the IdP, should get two IdP. url = self.base_url() resp = self.get(url) - entities = self._fetch_attribute_from_response(resp, - 'identity_providers') + entities = self._fetch_attribute_from_response( + resp, 'identity_providers' + ) entities_ids = [e['id'] for e in entities] self.assertCountEqual(entities_ids, [idp1_id, idp2_id]) @@ -1356,14 +1295,21 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) body['domain_id'] = domain['id'] - self.put(url, body={'identity_provider': body}, - expected_status=http.client.CREATED) - resp = self.put(url, body={'identity_provider': body}, - expected_status=http.client.CONFLICT) + self.put( + url, + body={'identity_provider': body}, + expected_status=http.client.CREATED, + ) + resp = self.put( + url, + body={'identity_provider': body}, + expected_status=http.client.CONFLICT, + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Duplicate entry', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Duplicate entry', resp_data.get('error', {}).get('message') + ) def test_get_head_idp(self): """Create and later fetch IdP.""" @@ -1372,17 +1318,22 @@ class 
FederatedIdentityProviderTests(test_v3.RestfulTestCase): PROVIDERS.resource_api.create_domain(domain['id'], domain) body['domain_id'] = domain['id'] default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') url = self.base_url(suffix=idp_id) resp = self.get(url) # Strip keys out of `body` dictionary. This is done # to be python 3 compatible body_keys = list(body) - self.assertValidResponse(resp, 'identity_provider', - dummy_validator, keys_to_check=body_keys, - ref=body) + self.assertValidResponse( + resp, + 'identity_provider', + dummy_validator, + keys_to_check=body_keys, + ref=body, + ) self.head(url, expected_status=http.client.OK) @@ -1404,8 +1355,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): Expect HTTP 404 Not Found for the GET IdP call. """ default_resp = self._create_default_idp() - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') self.assertIsNotNone(idp_id) url = self.base_url(suffix=idp_id) @@ -1416,8 +1368,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): """Deleting an IdP will delete its assigned protocol.""" # create default IdP default_resp = self._create_default_idp() - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp['id'] protocol_id = uuid.uuid4().hex @@ -1427,10 +1380,8 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): # assign protocol to IdP kwargs = {'expected_status': http.client.CREATED} resp, idp_id, proto = self._assign_protocol_to_idp( - url=url, - idp_id=idp_id, - 
proto=protocol_id, - **kwargs) + url=url, idp_id=idp_id, proto=protocol_id, **kwargs + ) # removing IdP will remove the assigned protocol as well self.assertEqual( @@ -1454,36 +1405,43 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): def test_update_idp_mutable_attributes(self): """Update IdP's mutable parameters.""" default_resp = self._create_default_idp() - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') url = self.base_url(suffix=idp_id) self.assertIsNotNone(idp_id) _enabled = not default_idp.get('enabled') - body = {'remote_ids': [uuid.uuid4().hex, uuid.uuid4().hex], - 'description': uuid.uuid4().hex, - 'enabled': _enabled} + body = { + 'remote_ids': [uuid.uuid4().hex, uuid.uuid4().hex], + 'description': uuid.uuid4().hex, + 'enabled': _enabled, + } body = {'identity_provider': body} resp = self.patch(url, body=body) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') + updated_idp = self._fetch_attribute_from_response( + resp, 'identity_provider' + ) body = body['identity_provider'] for key in body.keys(): if isinstance(body[key], list): - self.assertEqual(sorted(body[key]), - sorted(updated_idp.get(key))) + self.assertEqual( + sorted(body[key]), sorted(updated_idp.get(key)) + ) else: self.assertEqual(body[key], updated_idp.get(key)) resp = self.get(url) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') + updated_idp = self._fetch_attribute_from_response( + resp, 'identity_provider' + ) for key in body.keys(): if isinstance(body[key], list): - self.assertEqual(sorted(body[key]), - sorted(updated_idp.get(key))) + self.assertEqual( + sorted(body[key]), sorted(updated_idp.get(key)) + ) else: self.assertEqual(body[key], updated_idp.get(key)) @@ -1494,8 +1452,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): """ 
default_resp = self._create_default_idp() - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') + default_idp = self._fetch_attribute_from_response( + default_resp, 'identity_provider' + ) idp_id = default_idp.get('id') self.assertIsNotNone(idp_id) @@ -1504,8 +1463,11 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): body['protocols'] = [uuid.uuid4().hex, uuid.uuid4().hex] url = self.base_url(suffix=idp_id) - self.patch(url, body={'identity_provider': body}, - expected_status=http.client.BAD_REQUEST) + self.patch( + url, + body={'identity_provider': body}, + expected_status=http.client.BAD_REQUEST, + ) def test_update_nonexistent_idp(self): """Update nonexistent IdP. @@ -1540,11 +1502,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s') kwargs = {'expected_status': http.client.CREATED} - self._assign_protocol_to_idp(proto='saml2', - url=url, **kwargs) + self._assign_protocol_to_idp(proto='saml2', url=url, **kwargs) - self._assign_protocol_to_idp(proto='saml2', - url=url, **kwargs) + self._assign_protocol_to_idp(proto='saml2', url=url, **kwargs) def test_protocol_idp_pk_uniqueness(self): """Test whether Keystone checks for unique idp/protocol values. 
@@ -1556,8 +1516,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s') kwargs = {'expected_status': http.client.CREATED} - resp, idp_id, proto = self._assign_protocol_to_idp(proto='saml2', - url=url, **kwargs) + resp, idp_id, proto = self._assign_protocol_to_idp( + proto='saml2', url=url, **kwargs + ) kwargs = {'expected_status': http.client.CONFLICT} self._assign_protocol_to_idp( idp_id=idp_id, proto='saml2', validate=False, url=url, **kwargs @@ -1571,10 +1532,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): """ idp_id = uuid.uuid4().hex kwargs = {'expected_status': http.client.NOT_FOUND} - self._assign_protocol_to_idp(proto='saml2', - idp_id=idp_id, - validate=False, - **kwargs) + self._assign_protocol_to_idp( + proto='saml2', idp_id=idp_id, validate=False, **kwargs + ) def test_crud_protocol_without_protocol_id_in_url(self): # NOTE(morgan): This test is redundant but is added to ensure @@ -1583,51 +1543,61 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): idp_id, _ = self._create_and_decapsulate_response() mapping_id = uuid.uuid4().hex self._create_mapping(mapping_id=mapping_id) - protocol = { - 'id': uuid.uuid4().hex, - 'mapping_id': mapping_id - } + protocol = {'id': uuid.uuid4().hex, 'mapping_id': mapping_id} with self.test_client() as c: token = self.get_scoped_token() # DELETE/PATCH/PUT on non-trailing `/` results in # METHOD_NOT_ALLOWED - c.delete('/v3/OS-FEDERATION/identity_providers/%(idp_id)s' - '/protocols' % {'idp_id': idp_id}, - headers={'X-Auth-Token': token}, - expected_status_code=http.client.METHOD_NOT_ALLOWED) - c.patch('/v3/OS-FEDERATION/identity_providers/%(idp_id)s' - '/protocols/' % {'idp_id': idp_id}, - json={'protocol': protocol}, - headers={'X-Auth-Token': token}, - expected_status_code=http.client.METHOD_NOT_ALLOWED) - c.put('/v3/OS-FEDERATION/identity_providers/%(idp_id)s' - '/protocols' % {'idp_id': idp_id}, - 
json={'protocol': protocol}, - headers={'X-Auth-Token': token}, - expected_status_code=http.client.METHOD_NOT_ALLOWED) + c.delete( + '/v3/OS-FEDERATION/identity_providers/%(idp_id)s' + '/protocols' % {'idp_id': idp_id}, + headers={'X-Auth-Token': token}, + expected_status_code=http.client.METHOD_NOT_ALLOWED, + ) + c.patch( + '/v3/OS-FEDERATION/identity_providers/%(idp_id)s' + '/protocols/' % {'idp_id': idp_id}, + json={'protocol': protocol}, + headers={'X-Auth-Token': token}, + expected_status_code=http.client.METHOD_NOT_ALLOWED, + ) + c.put( + '/v3/OS-FEDERATION/identity_providers/%(idp_id)s' + '/protocols' % {'idp_id': idp_id}, + json={'protocol': protocol}, + headers={'X-Auth-Token': token}, + expected_status_code=http.client.METHOD_NOT_ALLOWED, + ) # DELETE/PATCH/PUT should raise 405 with trailing '/', it is # remapped to without the trailing '/' by the normalization # middleware. - c.delete('/v3/OS-FEDERATION/identity_providers/%(idp_id)s' - '/protocols/' % {'idp_id': idp_id}, - headers={'X-Auth-Token': token}, - expected_status_code=http.client.METHOD_NOT_ALLOWED) - c.patch('/v3/OS-FEDERATION/identity_providers/%(idp_id)s' - '/protocols/' % {'idp_id': idp_id}, - json={'protocol': protocol}, - headers={'X-Auth-Token': token}, - expected_status_code=http.client.METHOD_NOT_ALLOWED) - c.put('/v3/OS-FEDERATION/identity_providers/%(idp_id)s' - '/protocols/' % {'idp_id': idp_id}, - json={'protocol': protocol}, - headers={'X-Auth-Token': token}, - expected_status_code=http.client.METHOD_NOT_ALLOWED) + c.delete( + '/v3/OS-FEDERATION/identity_providers/%(idp_id)s' + '/protocols/' % {'idp_id': idp_id}, + headers={'X-Auth-Token': token}, + expected_status_code=http.client.METHOD_NOT_ALLOWED, + ) + c.patch( + '/v3/OS-FEDERATION/identity_providers/%(idp_id)s' + '/protocols/' % {'idp_id': idp_id}, + json={'protocol': protocol}, + headers={'X-Auth-Token': token}, + expected_status_code=http.client.METHOD_NOT_ALLOWED, + ) + c.put( + 
'/v3/OS-FEDERATION/identity_providers/%(idp_id)s' + '/protocols/' % {'idp_id': idp_id}, + json={'protocol': protocol}, + headers={'X-Auth-Token': token}, + expected_status_code=http.client.METHOD_NOT_ALLOWED, + ) def test_get_head_protocol(self): """Create and later fetch protocol tied to IdP.""" resp, idp_id, proto = self._assign_protocol_to_idp( - expected_status=http.client.CREATED) + expected_status=http.client.CREATED + ) proto_id = self._fetch_attribute_from_response(resp, 'protocol')['id'] url = "%s/protocols/%s" % (idp_id, proto_id) url = self.base_url(suffix=url) @@ -1638,10 +1608,13 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): # Strip keys out of `body` dictionary. This is done # to be python 3 compatible reference_keys = list(reference) - self.assertValidResponse(resp, 'protocol', - dummy_validator, - keys_to_check=reference_keys, - ref=reference) + self.assertValidResponse( + resp, + 'protocol', + dummy_validator, + keys_to_check=reference_keys, + ref=reference, + ) self.head(url, expected_status=http.client.OK) @@ -1652,13 +1625,14 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): """ resp, idp_id, proto = self._assign_protocol_to_idp( - expected_status=http.client.CREATED) + expected_status=http.client.CREATED + ) iterations = random.randint(0, 16) protocol_ids = [] for _ in range(iterations): resp, _, proto = self._assign_protocol_to_idp( - idp_id=idp_id, - expected_status=http.client.CREATED) + idp_id=idp_id, expected_status=http.client.CREATED + ) proto_id = self._fetch_attribute_from_response(resp, 'protocol') proto_id = proto_id['id'] protocol_ids.append(proto_id) @@ -1666,9 +1640,9 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): url = "%s/protocols" % idp_id url = self.base_url(suffix=url) resp = self.get(url) - self.assertValidListResponse(resp, 'protocols', - dummy_validator, - keys_to_check=['id']) + self.assertValidListResponse( + resp, 'protocols', dummy_validator, keys_to_check=['id'] 
+ ) entities = self._fetch_attribute_from_response(resp, 'protocols') entities = set([entity['id'] for entity in entities]) protocols_intersection = entities.intersection(protocol_ids) @@ -1679,7 +1653,8 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): def test_update_protocols_attribute(self): """Update protocol's attribute.""" resp, idp_id, proto = self._assign_protocol_to_idp( - expected_status=http.client.CREATED) + expected_status=http.client.CREATED + ) new_mapping_id = uuid.uuid4().hex self._create_mapping(mapping_id=new_mapping_id) @@ -1687,11 +1662,13 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): url = self.base_url(suffix=url) body = {'mapping_id': new_mapping_id} resp = self.patch(url, body={'protocol': body}) - self.assertValidResponse(resp, 'protocol', dummy_validator, - keys_to_check=['id', 'mapping_id'], - ref={'id': proto, - 'mapping_id': new_mapping_id} - ) + self.assertValidResponse( + resp, + 'protocol', + dummy_validator, + keys_to_check=['id', 'mapping_id'], + ref={'id': proto, 'mapping_id': new_mapping_id}, + ) def test_delete_protocol(self): """Delete protocol. @@ -1700,12 +1677,11 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase): deleted. 
""" - url = self.base_url(suffix='%(idp_id)s/' - 'protocols/%(protocol_id)s') + url = self.base_url(suffix='%(idp_id)s/' 'protocols/%(protocol_id)s') resp, idp_id, proto = self._assign_protocol_to_idp( - expected_status=http.client.CREATED) - url = url % {'idp_id': idp_id, - 'protocol_id': proto} + expected_status=http.client.CREATED + ) + url = url % {'idp_id': idp_id, 'protocol_id': proto} self.delete(url) self.get(url, expected_status=http.client.NOT_FOUND) @@ -1722,7 +1698,8 @@ class MappingCRUDTests(test_v3.RestfulTestCase): self.assertValidMapping, keys_to_check=[], *args, - **kwargs) + **kwargs + ) def assertValidMappingResponse(self, resp, *args, **kwargs): return self.assertValidResponse( @@ -1731,7 +1708,8 @@ class MappingCRUDTests(test_v3.RestfulTestCase): self.assertValidMapping, keys_to_check=[], *args, - **kwargs) + **kwargs + ) def assertValidMapping(self, entity, ref=None): self.assertIsNotNone(entity.get('id')) @@ -1742,9 +1720,11 @@ class MappingCRUDTests(test_v3.RestfulTestCase): def _create_default_mapping_entry(self): url = self.MAPPING_URL + uuid.uuid4().hex - resp = self.put(url, - body={'mapping': mapping_fixtures.MAPPING_LARGE}, - expected_status=http.client.CREATED) + resp = self.put( + url, + body={'mapping': mapping_fixtures.MAPPING_LARGE}, + expected_status=http.client.CREATED, + ) return resp def _get_id_from_response(self, resp): @@ -1789,8 +1769,9 @@ class MappingCRUDTests(test_v3.RestfulTestCase): resp = self._create_default_mapping_entry() mapping_id = self._get_id_from_response(resp) url = url % {'mapping_id': mapping_id} - resp = self.patch(url, - body={'mapping': mapping_fixtures.MAPPING_SMALL}) + resp = self.patch( + url, body={'mapping': mapping_fixtures.MAPPING_SMALL} + ) self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_SMALL) resp = self.get(url) self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_SMALL) @@ -1805,66 +1786,100 @@ class MappingCRUDTests(test_v3.RestfulTestCase): def 
test_create_mapping_bad_requirements(self): url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_BAD_REQ}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping_fixtures.MAPPING_BAD_REQ}, + ) def test_create_mapping_no_rules(self): url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_NO_RULES}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping_fixtures.MAPPING_NO_RULES}, + ) def test_create_mapping_no_remote_objects(self): url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_NO_REMOTE}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping_fixtures.MAPPING_NO_REMOTE}, + ) def test_create_mapping_bad_value(self): url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_BAD_VALUE}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping_fixtures.MAPPING_BAD_VALUE}, + ) def test_create_mapping_missing_local(self): url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_MISSING_LOCAL}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping_fixtures.MAPPING_MISSING_LOCAL}, + ) def test_create_mapping_missing_type(self): url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_MISSING_TYPE}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping_fixtures.MAPPING_MISSING_TYPE}, + ) def test_create_mapping_wrong_type(self): url = self.MAPPING_URL + 
uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_WRONG_TYPE}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping_fixtures.MAPPING_WRONG_TYPE}, + ) def test_create_mapping_extra_remote_properties_not_any_of(self): url = self.MAPPING_URL + uuid.uuid4().hex mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping}, + ) def test_create_mapping_extra_remote_properties_any_one_of(self): url = self.MAPPING_URL + uuid.uuid4().hex mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping}, + ) def test_create_mapping_extra_remote_properties_just_type(self): url = self.MAPPING_URL + uuid.uuid4().hex mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping}, + ) def test_create_mapping_empty_map(self): url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': {}}) + self.put( + url, expected_status=http.client.BAD_REQUEST, body={'mapping': {}} + ) def test_create_mapping_extra_rules_properties(self): url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_EXTRA_RULES_PROPS}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping_fixtures.MAPPING_EXTRA_RULES_PROPS}, + ) def test_create_mapping_with_blacklist_and_whitelist(self): """Test for adding whitelist 
and blacklist in the rule. @@ -1875,28 +1890,33 @@ class MappingCRUDTests(test_v3.RestfulTestCase): """ url = self.MAPPING_URL + uuid.uuid4().hex mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_AND_BLACKLIST - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': mapping}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': mapping}, + ) def test_create_mapping_with_local_user_and_local_domain(self): url = self.MAPPING_URL + uuid.uuid4().hex resp = self.put( url, - body={ - 'mapping': mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN - }, - expected_status=http.client.CREATED) + body={'mapping': mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN}, + expected_status=http.client.CREATED, + ) self.assertValidMappingResponse( - resp, mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN) + resp, mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN + ) def test_create_mapping_with_ephemeral(self): url = self.MAPPING_URL + uuid.uuid4().hex resp = self.put( url, body={'mapping': mapping_fixtures.MAPPING_EPHEMERAL_USER}, - expected_status=http.client.CREATED) + expected_status=http.client.CREATED, + ) self.assertValidMappingResponse( - resp, mapping_fixtures.MAPPING_EPHEMERAL_USER) + resp, mapping_fixtures.MAPPING_EPHEMERAL_USER + ) def test_create_mapping_with_bad_user_type(self): url = self.MAPPING_URL + uuid.uuid4().hex @@ -1904,8 +1924,11 @@ class MappingCRUDTests(test_v3.RestfulTestCase): bad_mapping = copy.deepcopy(mapping_fixtures.MAPPING_EPHEMERAL_USER) # now sabotage the user type bad_mapping['rules'][0]['local'][0]['user']['type'] = uuid.uuid4().hex - self.put(url, expected_status=http.client.BAD_REQUEST, - body={'mapping': bad_mapping}) + self.put( + url, + expected_status=http.client.BAD_REQUEST, + body={'mapping': bad_mapping}, + ) def test_create_shadow_mapping_without_roles_fails(self): """Validate that mappings with projects contain roles when created.""" @@ -1913,7 +1936,7 @@ class 
MappingCRUDTests(test_v3.RestfulTestCase): self.put( url, body={'mapping': mapping_fixtures.MAPPING_PROJECTS_WITHOUT_ROLES}, - expected_status=http.client.BAD_REQUEST + expected_status=http.client.BAD_REQUEST, ) def test_update_shadow_mapping_without_roles_fails(self): @@ -1922,7 +1945,7 @@ class MappingCRUDTests(test_v3.RestfulTestCase): resp = self.put( url, body={'mapping': mapping_fixtures.MAPPING_PROJECTS}, - expected_status=http.client.CREATED + expected_status=http.client.CREATED, ) self.assertValidMappingResponse( resp, mapping_fixtures.MAPPING_PROJECTS @@ -1930,7 +1953,7 @@ class MappingCRUDTests(test_v3.RestfulTestCase): self.patch( url, body={'mapping': mapping_fixtures.MAPPING_PROJECTS_WITHOUT_ROLES}, - expected_status=http.client.BAD_REQUEST + expected_status=http.client.BAD_REQUEST, ) def test_create_shadow_mapping_without_name_fails(self): @@ -1939,7 +1962,7 @@ class MappingCRUDTests(test_v3.RestfulTestCase): self.put( url, body={'mapping': mapping_fixtures.MAPPING_PROJECTS_WITHOUT_NAME}, - expected_status=http.client.BAD_REQUEST + expected_status=http.client.BAD_REQUEST, ) def test_update_shadow_mapping_without_name_fails(self): @@ -1948,7 +1971,7 @@ class MappingCRUDTests(test_v3.RestfulTestCase): resp = self.put( url, body={'mapping': mapping_fixtures.MAPPING_PROJECTS}, - expected_status=http.client.CREATED + expected_status=http.client.CREATED, ) self.assertValidMappingResponse( resp, mapping_fixtures.MAPPING_PROJECTS @@ -1956,7 +1979,7 @@ class MappingCRUDTests(test_v3.RestfulTestCase): self.patch( url, body={'mapping': mapping_fixtures.MAPPING_PROJECTS_WITHOUT_NAME}, - expected_status=http.client.BAD_REQUEST + expected_status=http.client.BAD_REQUEST, ) @@ -1970,23 +1993,33 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): super(FederatedTokenTests, self).setUp() self._notifications = [] - def fake_saml_notify(action, user_id, group_ids, - identity_provider, protocol, token_id, outcome): + def fake_saml_notify( + 
action, + user_id, + group_ids, + identity_provider, + protocol, + token_id, + outcome, + ): note = { 'action': action, 'user_id': user_id, 'identity_provider': identity_provider, 'protocol': protocol, - 'send_notification_called': True} + 'send_notification_called': True, + } self._notifications.append(note) - self.useFixture(fixtures.MockPatchObject( - notifications, - 'send_saml_audit_notification', - fake_saml_notify)) + self.useFixture( + fixtures.MockPatchObject( + notifications, 'send_saml_audit_notification', fake_saml_notify + ) + ) - def _assert_last_notify(self, action, identity_provider, protocol, - user_id=None): + def _assert_last_notify( + self, action, identity_provider, protocol, user_id=None + ): self.assertTrue(self._notifications) note = self._notifications[-1] if user_id: @@ -2024,8 +2057,9 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): # Give the user a direct role assignment on the default domain, so they # can get a federated domain-scoped token. PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], user_id=token.user_id, - domain_id=CONF.identity.default_domain_id + self.role_admin['id'], + user_id=token.user_id, + domain_id=CONF.identity.default_domain_id, ) # Get a token scoped to the default domain with an ID of `default`, @@ -2033,19 +2067,8 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): # accordingly in the token formatters/providers. 
auth_request = { 'auth': { - 'identity': { - 'methods': [ - 'token' - ], - 'token': { - 'id': token.id - } - }, - 'scope': { - 'domain': { - 'id': CONF.identity.default_domain_id - } - } + 'identity': {'methods': ['token'], 'token': {'id': token.id}}, + 'scope': {'domain': {'id': CONF.identity.default_domain_id}}, } } r = self.v3_create_token(auth_request) @@ -2054,11 +2077,7 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): # Validate the token to make sure the token providers handle non-uuid # domain IDs properly. headers = {'X-Subject-Token': domain_scoped_token_id} - self.get( - '/auth/tokens', - token=domain_scoped_token_id, - headers=headers - ) + self.get('/auth/tokens', token=domain_scoped_token_id, headers=headers) def test_issue_the_same_unscoped_token_with_user_deleted(self): r = self._issue_unscoped_token() @@ -2091,8 +2110,7 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): """ enabled_false = {'enabled': False} PROVIDERS.federation_api.update_idp(self.IDP, enabled_false) - self.assertRaises(exception.Forbidden, - self._issue_unscoped_token) + self.assertRaises(exception.Forbidden, self._issue_unscoped_token) def test_issue_unscoped_token_group_names_in_mapping(self): r = self._issue_unscoped_token(assertion='ANOTHER_CUSTOMER_ASSERTION') @@ -2105,39 +2123,39 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): self._issue_unscoped_token(assertion='ANOTHER_TESTER_ASSERTION') def test_issue_unscoped_token_with_remote_no_attribute(self): - self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: - self.REMOTE_IDS[0] - }) + self._issue_unscoped_token( + idp=self.IDP_WITH_REMOTE, + environment={self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}, + ) def test_issue_unscoped_token_with_remote(self): - self.config_fixture.config(group='federation', - remote_id_attribute=self.REMOTE_ID_ATTR) - self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE, - environment={ - 
self.REMOTE_ID_ATTR: - self.REMOTE_IDS[0] - }) + self.config_fixture.config( + group='federation', remote_id_attribute=self.REMOTE_ID_ATTR + ) + self._issue_unscoped_token( + idp=self.IDP_WITH_REMOTE, + environment={self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}, + ) def test_issue_unscoped_token_with_saml2_remote(self): - self.config_fixture.config(group='saml2', - remote_id_attribute=self.REMOTE_ID_ATTR) - self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: - self.REMOTE_IDS[0] - }) + self.config_fixture.config( + group='saml2', remote_id_attribute=self.REMOTE_ID_ATTR + ) + self._issue_unscoped_token( + idp=self.IDP_WITH_REMOTE, + environment={self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}, + ) def test_issue_unscoped_token_with_remote_different(self): - self.config_fixture.config(group='federation', - remote_id_attribute=self.REMOTE_ID_ATTR) - self.assertRaises(exception.Forbidden, - self._issue_unscoped_token, - idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: uuid.uuid4().hex - }) + self.config_fixture.config( + group='federation', remote_id_attribute=self.REMOTE_ID_ATTR + ) + self.assertRaises( + exception.Forbidden, + self._issue_unscoped_token, + idp=self.IDP_WITH_REMOTE, + environment={self.REMOTE_ID_ATTR: uuid.uuid4().hex}, + ) def test_issue_unscoped_token_with_remote_default_overwritten(self): """Test that protocol remote_id_attribute has higher priority. @@ -2147,25 +2165,27 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): section. 
""" - self.config_fixture.config(group='saml2', - remote_id_attribute=self.REMOTE_ID_ATTR) - self.config_fixture.config(group='federation', - remote_id_attribute=uuid.uuid4().hex) - self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: - self.REMOTE_IDS[0] - }) + self.config_fixture.config( + group='saml2', remote_id_attribute=self.REMOTE_ID_ATTR + ) + self.config_fixture.config( + group='federation', remote_id_attribute=uuid.uuid4().hex + ) + self._issue_unscoped_token( + idp=self.IDP_WITH_REMOTE, + environment={self.REMOTE_ID_ATTR: self.REMOTE_IDS[0]}, + ) def test_issue_unscoped_token_with_remote_unavailable(self): - self.config_fixture.config(group='federation', - remote_id_attribute=self.REMOTE_ID_ATTR) - self.assertRaises(exception.Unauthorized, - self._issue_unscoped_token, - idp=self.IDP_WITH_REMOTE, - environment={ - uuid.uuid4().hex: uuid.uuid4().hex - }) + self.config_fixture.config( + group='federation', remote_id_attribute=self.REMOTE_ID_ATTR + ) + self.assertRaises( + exception.Unauthorized, + self._issue_unscoped_token, + idp=self.IDP_WITH_REMOTE, + environment={uuid.uuid4().hex: uuid.uuid4().hex}, + ) def test_issue_unscoped_token_with_remote_user_as_empty_string(self): # make sure that REMOTE_USER set as the empty string won't interfere @@ -2192,16 +2212,16 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): admin = unit.new_user_ref(CONF.identity.default_domain_id) PROVIDERS.identity_api.create_user(admin) PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], user_id=admin['id'], - project_id=self.proj_employees['id'] + self.role_admin['id'], + user_id=admin['id'], + project_id=self.proj_employees['id'], ) # try to scope the token. 
It should fail scope = self._scope_request( unscoped_token, 'project', self.proj_employees['id'] ) - self.v3_create_token( - scope, expected_status=http.client.UNAUTHORIZED) + self.v3_create_token(scope, expected_status=http.client.UNAUTHORIZED) def test_issue_unscoped_token_malformed_environment(self): """Test whether non string objects are filtered out. @@ -2215,7 +2235,7 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): environ = { 'malformed_object': object(), 'another_bad_idea': tuple(range(10)), - 'yet_another_bad_param': dict(zip(uuid.uuid4().hex, range(32))) + 'yet_another_bad_param': dict(zip(uuid.uuid4().hex, range(32))), } environ.update(mapping_fixtures.EMPLOYEE_ASSERTION) with self.make_request(environ=environ): @@ -2223,13 +2243,15 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): def test_scope_to_project_once_notify(self): r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE) + self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE + ) user_id = r.json['token']['user']['id'] self._assert_last_notify(self.ACTION, self.IDP, self.PROTOCOL, user_id) def test_scope_to_project_once(self): r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE) + self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE + ) token_resp = r.result['token'] project_id = token_resp['project']['id'] self._check_project_scoped_token_attributes(token_resp, project_id) @@ -2260,7 +2282,8 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): PROVIDERS.federation_api.update_idp(self.IDP, enabled_false) self.v3_create_token( self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER, - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_validate_token_after_deleting_idp_raises_not_found(self): token = self.v3_create_token( @@ -2270,9 +2293,7 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): federated_info = 
token.json_body['token']['user']['OS-FEDERATION'] idp_id = federated_info['identity_provider']['id'] PROVIDERS.federation_api.delete_idp(idp_id) - headers = { - 'X-Subject-Token': token_id - } + headers = {'X-Subject-Token': token_id} # NOTE(lbragstad): This raises a 404 NOT FOUND because the identity # provider is no longer present. We raise 404 NOT FOUND when we # validate a token and a project or domain no longer exists. @@ -2280,7 +2301,7 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): '/auth/tokens/', token=token_id, headers=headers, - expected_status=http.client.NOT_FOUND + expected_status=http.client.NOT_FOUND, ) def test_deleting_idp_cascade_deleting_fed_user(self): @@ -2313,7 +2334,8 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): """Scope unscoped token with a project we don't have access to.""" self.v3_create_token( self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER, - expected_status=http.client.UNAUTHORIZED) + expected_status=http.client.UNAUTHORIZED, + ) def test_scope_to_project_multiple_times(self): """Try to scope the unscoped token multiple times. 
@@ -2324,15 +2346,17 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): * Employees' project """ - bodies = (self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN, - self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN) - project_ids = (self.proj_employees['id'], - self.proj_customers['id']) + bodies = ( + self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN, + self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN, + ) + project_ids = (self.proj_employees['id'], self.proj_customers['id']) for body, project_id_ref in zip(bodies, project_ids): r = self.v3_create_token(body) token_resp = r.result['token'] - self._check_project_scoped_token_attributes(token_resp, - project_id_ref) + self._check_project_scoped_token_attributes( + token_resp, project_id_ref + ) def test_scope_to_project_with_duplicate_roles_returns_single_role(self): r = self.v3_create_token(self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN) @@ -2360,10 +2384,12 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): def test_scope_to_project_with_only_inherited_roles(self): """Try to scope token whose only roles are inherited.""" r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER) + self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER + ) token_resp = r.result['token'] self._check_project_scoped_token_attributes( - token_resp, self.project_inherited['id']) + token_resp, self.project_inherited['id'] + ) roles_ref = [self.role_customer] projects_ref = self.project_inherited self._check_projects_and_roles(token_resp, roles_ref, projects_ref) @@ -2373,14 +2399,17 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): """Try to scope token from non-existent unscoped token.""" self.v3_create_token( self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN, - expected_status=http.client.NOT_FOUND) + expected_status=http.client.NOT_FOUND, + ) def test_issue_token_from_rules_without_user(self): environ = copy.deepcopy(mapping_fixtures.BAD_TESTER_ASSERTION) with 
self.make_request(environ=environ): - self.assertRaises(exception.Unauthorized, - authentication.authenticate_for_token, - self.UNSCOPED_V3_SAML2_REQ) + self.assertRaises( + exception.Unauthorized, + authentication.authenticate_for_token, + self.UNSCOPED_V3_SAML2_REQ, + ) def test_issue_token_with_nonexistent_group(self): """Inject assertion that matches rule issuing bad group id. @@ -2389,15 +2418,18 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): backend and raise exception.MappedGroupNotFound exception. """ - self.assertRaises(exception.MappedGroupNotFound, - self._issue_unscoped_token, - assertion='CONTRACTOR_ASSERTION') + self.assertRaises( + exception.MappedGroupNotFound, + self._issue_unscoped_token, + assertion='CONTRACTOR_ASSERTION', + ) def test_scope_to_domain_once(self): r = self.v3_create_token(self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER) token_resp = r.result['token'] - self._check_domain_scoped_token_attributes(token_resp, - self.domainA['id']) + self._check_domain_scoped_token_attributes( + token_resp, self.domainA['id'] + ) def test_scope_to_domain_multiple_tokens(self): """Issue multiple tokens scoping to different domains. 
@@ -2409,48 +2441,61 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): * domainC """ - bodies = (self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN, - self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN, - self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN) - domain_ids = (self.domainA['id'], - self.domainB['id'], - self.domainC['id']) + bodies = ( + self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN, + self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN, + self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN, + ) + domain_ids = ( + self.domainA['id'], + self.domainB['id'], + self.domainC['id'], + ) for body, domain_id_ref in zip(bodies, domain_ids): r = self.v3_create_token(body) token_resp = r.result['token'] - self._check_domain_scoped_token_attributes(token_resp, - domain_id_ref) + self._check_domain_scoped_token_attributes( + token_resp, domain_id_ref + ) def test_scope_to_domain_with_only_inherited_roles_fails(self): """Try to scope to a domain that has no direct roles.""" self.v3_create_token( self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER, - expected_status=http.client.UNAUTHORIZED) + expected_status=http.client.UNAUTHORIZED, + ) def test_list_projects(self): urls = ('/OS-FEDERATION/projects', '/auth/projects') - token = (self.tokens['CUSTOMER_ASSERTION'], - self.tokens['EMPLOYEE_ASSERTION'], - self.tokens['ADMIN_ASSERTION']) + token = ( + self.tokens['CUSTOMER_ASSERTION'], + self.tokens['EMPLOYEE_ASSERTION'], + self.tokens['ADMIN_ASSERTION'], + ) - projects_refs = (set([self.proj_customers['id'], - self.project_inherited['id']]), - set([self.proj_employees['id'], - self.project_all['id']]), - set([self.proj_employees['id'], - self.project_all['id'], - self.proj_customers['id'], - self.project_inherited['id']])) + projects_refs = ( + set([self.proj_customers['id'], self.project_inherited['id']]), + set([self.proj_employees['id'], self.project_all['id']]), + set( + [ + self.proj_employees['id'], + self.project_all['id'], + self.proj_customers['id'], + self.project_inherited['id'], + ] + ), + ) for token, projects_ref in 
zip(token, projects_refs): for url in urls: r = self.get(url, token=token) projects_resp = r.result['projects'] projects = set(p['id'] for p in projects_resp) - self.assertEqual(projects_ref, projects, - 'match failed for url %s' % url) + self.assertEqual( + projects_ref, projects, 'match failed for url %s' % url + ) # TODO(samueldmq): Create another test class for role inheritance tests. # The advantage would be to reduce the complexity of this test class and @@ -2460,7 +2505,8 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): # Create a subproject subproject_inherited = unit.new_project_ref( domain_id=self.domainD['id'], - parent_id=self.project_inherited['id']) + parent_id=self.project_inherited['id'], + ) PROVIDERS.resource_api.create_project( subproject_inherited['id'], subproject_inherited ) @@ -2470,13 +2516,16 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): role_id=self.role_employee['id'], group_id=self.group_employees['id'], project_id=self.project_inherited['id'], - inherited_to_projects=True) + inherited_to_projects=True, + ) # Define expected projects from employee assertion, which contain # the created subproject - expected_project_ids = [self.project_all['id'], - self.proj_employees['id'], - subproject_inherited['id']] + expected_project_ids = [ + self.project_all['id'], + self.proj_employees['id'], + subproject_inherited['id'], + ] # Assert expected projects for both available URLs for url in ('/OS-FEDERATION/projects', '/auth/projects'): @@ -2485,34 +2534,39 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): self.assertEqual(len(expected_project_ids), len(project_ids)) for expected_project_id in expected_project_ids: - self.assertIn(expected_project_id, project_ids, - 'Projects match failed for url %s' % url) + self.assertIn( + expected_project_id, + project_ids, + 'Projects match failed for url %s' % url, + ) def test_list_domains(self): urls = 
('/OS-FEDERATION/domains', '/auth/domains') - tokens = (self.tokens['CUSTOMER_ASSERTION'], - self.tokens['EMPLOYEE_ASSERTION'], - self.tokens['ADMIN_ASSERTION']) + tokens = ( + self.tokens['CUSTOMER_ASSERTION'], + self.tokens['EMPLOYEE_ASSERTION'], + self.tokens['ADMIN_ASSERTION'], + ) # NOTE(henry-nash): domain D does not appear in the expected results # since it only had inherited roles (which only apply to projects # within the domain) - domain_refs = (set([self.domainA['id']]), - set([self.domainA['id'], - self.domainB['id']]), - set([self.domainA['id'], - self.domainB['id'], - self.domainC['id']])) + domain_refs = ( + set([self.domainA['id']]), + set([self.domainA['id'], self.domainB['id']]), + set([self.domainA['id'], self.domainB['id'], self.domainC['id']]), + ) for token, domains_ref in zip(tokens, domain_refs): for url in urls: r = self.get(url, token=token) domains_resp = r.result['domains'] domains = set(p['id'] for p in domains_resp) - self.assertEqual(domains_ref, domains, - 'match failed for url %s' % url) + self.assertEqual( + domains_ref, domains, 'match failed for url %s' % url + ) def test_full_workflow(self): """Test 'standard' workflow for granting access tokens. 
@@ -2533,8 +2587,9 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): random_project = random.randint(0, len(projects) - 1) project = projects[random_project] - v3_scope_request = self._scope_request(employee_unscoped_token_id, - 'project', project['id']) + v3_scope_request = self._scope_request( + employee_unscoped_token_id, 'project', project['id'] + ) r = self.v3_create_token(v3_scope_request) token_resp = r.result['token'] @@ -2570,28 +2625,13 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): 'rules': [ { 'local': [ - { - 'group': { - 'id': group['id'] - } - }, - { - 'user': { - 'name': '{0}' - } - } + {'group': {'id': group['id']}}, + {'user': {'name': '{0}'}}, ], 'remote': [ - { - 'type': 'UserName' - }, - { - 'type': 'LastName', - 'any_one_of': [ - 'Account' - ] - } - ] + {'type': 'UserName'}, + {'type': 'LastName', 'any_one_of': ['Account']}, + ], } ] } @@ -2605,11 +2645,12 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): # scope token to project_all, expect HTTP 500 scoped_token = self._scope_request( - r.id, 'project', - self.project_all['id']) + r.id, 'project', self.project_all['id'] + ) self.v3_create_token( - scoped_token, expected_status=http.client.INTERNAL_SERVER_ERROR) + scoped_token, expected_status=http.client.INTERNAL_SERVER_ERROR + ) def test_lists_with_missing_group_in_backend(self): """Test a mapping that points to a group that does not exist. 
@@ -2633,33 +2674,19 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): rules = { 'rules': [ { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] + "local": [{"user": {"name": "{0}", "id": "{0}"}}], + "remote": [{"type": "REMOTE_USER"}], }, { "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } + {"groups": "{0}", "domain": {"name": domain_name}} ], "remote": [ { "type": "REMOTE_USER_GROUPS", } - ] - } + ], + }, ] } PROVIDERS.federation_api.update_mapping(self.mapping['id'], rules) @@ -2696,8 +2723,9 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): group_exists = PROVIDERS.identity_api.create_group(group_exists) # Add a group "NO_EXISTS" - group_no_exists = unit.new_group_ref(domain_id=domain_id, - name='NO_EXISTS') + group_no_exists = unit.new_group_ref( + domain_id=domain_id, name='NO_EXISTS' + ) group_no_exists = PROVIDERS.identity_api.create_group(group_no_exists) group_ids = set([group_exists['id'], group_no_exists['id']]) @@ -2705,34 +2733,17 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): rules = { 'rules': [ { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] + "local": [{"user": {"name": "{0}", "id": "{0}"}}], + "remote": [{"type": "REMOTE_USER"}], }, { "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } + {"groups": "{0}", "domain": {"name": domain_name}} ], "remote": [ - { - "type": "REMOTE_USER_GROUPS", - "blacklist": [] - } - ] - } + {"type": "REMOTE_USER_GROUPS", "blacklist": []} + ], + }, ] } PROVIDERS.federation_api.update_mapping(self.mapping['id'], rules) @@ -2765,13 +2776,13 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): domain_name = self.domainA['name'] # Add a group "EXISTS" - group_exists = unit.new_group_ref(domain_id=domain_id, - name='EXISTS') + 
group_exists = unit.new_group_ref(domain_id=domain_id, name='EXISTS') group_exists = PROVIDERS.identity_api.create_group(group_exists) # Add a group "NO_EXISTS" - group_no_exists = unit.new_group_ref(domain_id=domain_id, - name='NO_EXISTS') + group_no_exists = unit.new_group_ref( + domain_id=domain_id, name='NO_EXISTS' + ) group_no_exists = PROVIDERS.identity_api.create_group(group_no_exists) group_ids = set([group_exists['id'], group_no_exists['id']]) @@ -2779,33 +2790,19 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): rules = { 'rules': [ { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] + "local": [{"user": {"name": "{0}", "id": "{0}"}}], + "remote": [{"type": "REMOTE_USER"}], }, { "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } + {"groups": "{0}", "domain": {"name": domain_name}} ], "remote": [ { "type": "REMOTE_USER_GROUPS", } - ] - } + ], + }, ] } PROVIDERS.federation_api.update_mapping(self.mapping['id'], rules) @@ -2840,34 +2837,17 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): rules = { 'rules': [ { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] + "local": [{"user": {"name": "{0}", "id": "{0}"}}], + "remote": [{"type": "REMOTE_USER"}], }, { "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } + {"groups": "{0}", "domain": {"name": domain_name}} ], "remote": [ - { - "type": "REMOTE_USER_GROUPS", - "whitelist": [] - } - ] - } + {"type": "REMOTE_USER_GROUPS", "whitelist": []} + ], + }, ] } PROVIDERS.federation_api.update_mapping(self.mapping['id'], rules) @@ -2897,13 +2877,13 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): domain_name = self.domainA['name'] # Add a group "EXISTS" - group_exists = unit.new_group_ref(domain_id=domain_id, - name='EXISTS') + group_exists = 
unit.new_group_ref(domain_id=domain_id, name='EXISTS') group_exists = PROVIDERS.identity_api.create_group(group_exists) # Add a group "NO_EXISTS" - group_no_exists = unit.new_group_ref(domain_id=domain_id, - name='NO_EXISTS') + group_no_exists = unit.new_group_ref( + domain_id=domain_id, name='NO_EXISTS' + ) group_no_exists = PROVIDERS.identity_api.create_group(group_no_exists) group_ids = set([group_exists['id'], group_no_exists['id']]) @@ -2911,33 +2891,19 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): rules = { 'rules': [ { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] + "local": [{"user": {"name": "{0}", "id": "{0}"}}], + "remote": [{"type": "REMOTE_USER"}], }, { "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } + {"groups": "{0}", "domain": {"name": domain_name}} ], "remote": [ { "type": "REMOTE_USER_GROUPS", } - ] - } + ], + }, ] } PROVIDERS.federation_api.update_mapping(self.mapping['id'], rules) @@ -2955,8 +2921,9 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): Expect server to return unscoped token. 
""" - self.config_fixture.config(group='federation', - assertion_prefix=self.ASSERTION_PREFIX) + self.config_fixture.config( + group='federation', assertion_prefix=self.ASSERTION_PREFIX + ) self._issue_unscoped_token(assertion='EMPLOYEE_ASSERTION_PREFIXED') def test_assertion_prefix_parameter_expect_fail(self): @@ -2970,20 +2937,22 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): """ self._issue_unscoped_token() - self.config_fixture.config(group='federation', - assertion_prefix='UserName') + self.config_fixture.config( + group='federation', assertion_prefix='UserName' + ) - self.assertRaises(exception.Unauthorized, - self._issue_unscoped_token) + self.assertRaises(exception.Unauthorized, self._issue_unscoped_token) def test_unscoped_token_has_user_domain(self): r = self._issue_unscoped_token() self._check_domains_are_valid( - render_token.render_token_response_from_model(r)['token']) + render_token.render_token_response_from_model(r)['token'] + ) def test_scoped_token_has_user_domain(self): r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE) + self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE + ) self._check_domains_are_valid(r.json_body['token']) def test_issue_unscoped_token_for_local_user(self): @@ -2998,22 +2967,26 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): self.assertTrue(r.unscoped) def test_issue_token_for_local_user_user_not_found(self): - self.assertRaises(exception.Unauthorized, - self._issue_unscoped_token, - assertion='ANOTHER_LOCAL_USER_ASSERTION') + self.assertRaises( + exception.Unauthorized, + self._issue_unscoped_token, + assertion='ANOTHER_LOCAL_USER_ASSERTION', + ) def test_user_name_and_id_in_federation_token(self): r = self._issue_unscoped_token(assertion='EMPLOYEE_ASSERTION') self.assertEqual( - mapping_fixtures.EMPLOYEE_ASSERTION['UserName'], - r.user['name']) + mapping_fixtures.EMPLOYEE_ASSERTION['UserName'], r.user['name'] + ) 
self.assertNotEqual(r.user['name'], r.user_id) r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE) + self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE + ) token = r.json_body['token'] self.assertEqual( mapping_fixtures.EMPLOYEE_ASSERTION['UserName'], - token['user']['name']) + token['user']['name'], + ) self.assertNotEqual(token['user']['name'], token['user']['id']) def test_issue_unscoped_token_with_remote_different_from_protocol(self): @@ -3026,15 +2999,13 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): ) self._issue_unscoped_token( idp=self.IDP_WITH_REMOTE, - environment={ - protocol['remote_id_attribute']: self.REMOTE_IDS[0] - } + environment={protocol['remote_id_attribute']: self.REMOTE_IDS[0]}, ) self.assertRaises( exception.Unauthorized, self._issue_unscoped_token, idp=self.IDP_WITH_REMOTE, - environment={uuid.uuid4().hex: self.REMOTE_IDS[0]} + environment={uuid.uuid4().hex: self.REMOTE_IDS[0]}, ) def test_issue_token_for_ephemeral_user_with_remote_domain(self): @@ -3053,7 +3024,8 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): PROVIDERS.federation_api.update_mapping( self.mapping["id"], - mapping_fixtures.MAPPING_EPHEMERAL_USER_REMOTE_DOMAIN) + mapping_fixtures.MAPPING_EPHEMERAL_USER_REMOTE_DOMAIN, + ) r = self._issue_unscoped_token(assertion='USER_WITH_DOMAIN_ASSERTION') self.assertEqual(r.user_domain["id"], domain_ref["id"]) self.assertNotEqual(r.user_domain["id"], self.idp["domain_id"]) @@ -3073,25 +3045,28 @@ class FernetFederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) def auth_plugin_config_override(self): methods = ['saml2', 'token', 'password'] - super(FernetFederatedTokenTests, - self).auth_plugin_config_override(methods) + super(FernetFederatedTokenTests, self).auth_plugin_config_override( + methods + ) def 
test_federated_unscoped_token(self): resp = self._issue_unscoped_token() self.assertValidMappedUser( - render_token.render_token_response_from_model(resp)['token']) + render_token.render_token_response_from_model(resp)['token'] + ) def test_federated_unscoped_token_with_multiple_groups(self): assertion = 'ANOTHER_CUSTOMER_ASSERTION' resp = self._issue_unscoped_token(assertion=assertion) self.assertValidMappedUser( - render_token.render_token_response_from_model(resp)['token']) + render_token.render_token_response_from_model(resp)['token'] + ) def test_validate_federated_unscoped_token(self): resp = self._issue_unscoped_token() @@ -3109,15 +3084,17 @@ class FernetFederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): """ resp = self._issue_unscoped_token() self.assertValidMappedUser( - render_token.render_token_response_from_model(resp)['token']) + render_token.render_token_response_from_model(resp)['token'] + ) unscoped_token = resp.id resp = self.get('/auth/projects', token=unscoped_token) projects = resp.result['projects'] random_project = random.randint(0, len(projects) - 1) project = projects[random_project] - v3_scope_request = self._scope_request(unscoped_token, - 'project', project['id']) + v3_scope_request = self._scope_request( + unscoped_token, 'project', project['id'] + ) resp = self.v3_create_token(v3_scope_request) token_resp = resp.result['token'] @@ -3138,8 +3115,9 @@ class JWSFederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): def auth_plugin_config_override(self): methods = ['saml2', 'token', 'password'] - super(JWSFederatedTokenTests, - self).auth_plugin_config_override(methods) + super(JWSFederatedTokenTests, self).auth_plugin_config_override( + methods + ) def test_federated_unscoped_token(self): token_model = self._issue_unscoped_token() @@ -3178,8 +3156,9 @@ class JWSFederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): random_project = random.randint(0, len(projects) - 1) project = 
projects[random_project] - v3_scope_request = self._scope_request(unscoped_token, - 'project', project['id']) + v3_scope_request = self._scope_request( + unscoped_token, 'project', project['id'] + ) resp = self.v3_create_token(v3_scope_request) token_resp = resp.result['token'] @@ -3198,8 +3177,7 @@ class FederatedTokenTestsMethodToken(FederatedTokenTests): def auth_plugin_config_override(self): methods = ['saml2', 'token'] - super(FederatedTokenTests, - self).auth_plugin_config_override(methods) + super(FederatedTokenTests, self).auth_plugin_config_override(methods) def test_full_workflow(self): """Test 'standard' workflow for granting access tokens. @@ -3220,8 +3198,9 @@ class FederatedTokenTestsMethodToken(FederatedTokenTests): random_project = random.randint(0, len(projects) - 1) project = projects[random_project] - v3_scope_request = self._scope_request(employee_unscoped_token_id, - 'project', project['id']) + v3_scope_request = self._scope_request( + employee_unscoped_token_id, 'project', project['id'] + ) r = self.v3_create_token(v3_scope_request) token_resp = r.result['token'] @@ -3259,7 +3238,8 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): def test_user_role_assignment(self): # create project and role project_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) role_ref = unit.new_role_ref() PROVIDERS.role_api.create_role(role_ref['id'], role_ref) @@ -3269,37 +3249,48 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): # exchange an unscoped token for a scoped token; resulting in # unauthorized because the user doesn't have any role assignments - v3_scope_request = self._scope_request(unscoped_token, 'project', - project_ref['id']) - r = self.v3_create_token(v3_scope_request, - expected_status=http.client.UNAUTHORIZED) + v3_scope_request = self._scope_request( + 
unscoped_token, 'project', project_ref['id'] + ) + r = self.v3_create_token( + v3_scope_request, expected_status=http.client.UNAUTHORIZED + ) # assign project role to federated user PROVIDERS.assignment_api.add_role_to_user_and_project( - user_id, project_ref['id'], role_ref['id']) + user_id, project_ref['id'], role_ref['id'] + ) # exchange an unscoped token for a scoped token - r = self.v3_create_token(v3_scope_request, - expected_status=http.client.CREATED) + r = self.v3_create_token( + v3_scope_request, expected_status=http.client.CREATED + ) scoped_token = r.headers['X-Subject-Token'] # ensure user can access resource based on role assignment path = '/projects/%(project_id)s' % {'project_id': project_ref['id']} - r = self.v3_request(path=path, method='GET', - expected_status=http.client.OK, - token=scoped_token) + r = self.v3_request( + path=path, + method='GET', + expected_status=http.client.OK, + token=scoped_token, + ) self.assertValidProjectResponse(r, project_ref) # create a 2nd project project_ref2 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project_ref2['id'], project_ref2) # ensure the user cannot access the 2nd resource (forbidden) path = '/projects/%(project_id)s' % {'project_id': project_ref2['id']} - r = self.v3_request(path=path, method='GET', - expected_status=http.client.FORBIDDEN, - token=scoped_token) + r = self.v3_request( + path=path, + method='GET', + expected_status=http.client.FORBIDDEN, + token=scoped_token, + ) def test_domain_scoped_user_role_assignment(self): # create domain and role @@ -3313,10 +3304,12 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): # exchange an unscoped token for a scoped token; resulting in # unauthorized because the user doesn't have any role assignments - v3_scope_request = self._scope_request(unscoped_token, 'domain', - domain_ref['id']) - r = 
self.v3_create_token(v3_scope_request, - expected_status=http.client.UNAUTHORIZED) + v3_scope_request = self._scope_request( + unscoped_token, 'domain', domain_ref['id'] + ) + r = self.v3_create_token( + v3_scope_request, expected_status=http.client.UNAUTHORIZED + ) # assign domain role to user PROVIDERS.assignment_api.create_grant( @@ -3324,8 +3317,9 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): ) # exchange an unscoped token for domain scoped token and test - r = self.v3_create_token(v3_scope_request, - expected_status=http.client.CREATED) + r = self.v3_create_token( + v3_scope_request, expected_status=http.client.CREATED + ) self.assertIsNotNone(r.headers.get('X-Subject-Token')) token_resp = r.result['token'] self.assertIn('domain', token_resp) @@ -3333,7 +3327,8 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): def test_auth_projects_matches_federation_projects(self): # create project and role project_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) + domain_id=CONF.identity.default_domain_id + ) PROVIDERS.resource_api.create_project(project_ref['id'], project_ref) role_ref = unit.new_role_ref() PROVIDERS.role_api.create_role(role_ref['id'], role_ref) @@ -3343,7 +3338,8 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): # assign project role to federated user PROVIDERS.assignment_api.add_role_to_user_and_project( - user_id, project_ref['id'], role_ref['id']) + user_id, project_ref['id'], role_ref['id'] + ) # get auth projects r = self.get('/auth/projects', token=unscoped_token) @@ -3371,8 +3367,10 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): # assign role to group at project PROVIDERS.assignment_api.create_grant( - role_ref['id'], group_id=group_ref['id'], - project_id=project_ref['id'], domain_id=domain_id + role_ref['id'], + group_id=group_ref['id'], + project_id=project_ref['id'], + domain_id=domain_id, ) # add user to group @@ 
-3431,8 +3429,9 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): # assign domain role to group PROVIDERS.assignment_api.create_grant( - role_ref['id'], group_id=group_ref['id'], - domain_id=domain_ref['id'] + role_ref['id'], + group_id=group_ref['id'], + domain_id=domain_ref['id'], ) # add user to group @@ -3467,7 +3466,7 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): self.head( '/OS-FEDERATION/domains', token=unscoped_token, - expected_status=http.client.OK + expected_status=http.client.OK, ) # assign group domain and role to user, this should create a @@ -3508,13 +3507,14 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): self.head( '/OS-FEDERATION/projects', token=unscoped_token, - expected_status=http.client.OK + expected_status=http.client.OK, ) # assign group project and role to user, this should create a # duplicate project PROVIDERS.assignment_api.add_role_to_user_and_project( - user_id, project_from_group['id'], role_ref['id']) + user_id, project_from_group['id'], role_ref['id'] + ) # get user projects via /OS-FEDERATION/projects and test for duplicates r = self.get('/OS-FEDERATION/projects', token=unscoped_token) @@ -3536,7 +3536,8 @@ class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): # Create a protocol protocol = self.proto_ref(mapping_id=self.mapping['id']) PROVIDERS.federation_api.create_protocol( - self.IDP, protocol['id'], protocol) + self.IDP, protocol['id'], protocol + ) # Authenticate to create a new federated_user entry with a foreign # key pointing to the protocol @@ -3573,8 +3574,7 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): # update the mapping we have already setup to have specific projects # and roles. 
PROVIDERS.federation_api.update_mapping( - self.mapping['id'], - mapping_fixtures.MAPPING_PROJECTS + self.mapping['id'], mapping_fixtures.MAPPING_PROJECTS ) # The shadow mapping we're using in these tests contain a role named @@ -3603,7 +3603,7 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): self.expected_results = { 'Production': 'observer', 'Staging': 'member', - 'Project for tbo': 'admin' + 'Project for tbo': 'admin', } def auth_plugin_config_override(self): @@ -3621,21 +3621,22 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): response = self._issue_unscoped_token() self.assertValidMappedUser( - render_token.render_token_response_from_model(response)['token']) + render_token.render_token_response_from_model(response)['token'] + ) unscoped_token = response.id response = self.get('/auth/projects', token=unscoped_token) projects = response.json_body['projects'] for project in projects: project = PROVIDERS.resource_api.get_project_by_name( - project['name'], - self.idp['domain_id'] + project['name'], self.idp['domain_id'] ) self.assertIn(project['name'], self.expected_results) def test_shadow_mapping_create_projects_role_assignments(self): response = self._issue_unscoped_token() self.assertValidMappedUser( - render_token.render_token_response_from_model(response)['token']) + render_token.render_token_response_from_model(response)['token'] + ) unscoped_token = response.id response = self.get('/auth/projects', token=unscoped_token) projects = response.json_body['projects'] @@ -3663,7 +3664,8 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): def test_shadow_mapping_creates_project_in_identity_provider_domain(self): response = self._issue_unscoped_token() self.assertValidMappedUser( - render_token.render_token_response_from_model(response)['token']) + render_token.render_token_response_from_model(response)['token'] + ) unscoped_token = response.id response = self.get('/auth/projects', 
token=unscoped_token) projects = response.json_body['projects'] @@ -3674,7 +3676,8 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): """Test that projects remain idempotent for every federated auth.""" response = self._issue_unscoped_token() self.assertValidMappedUser( - render_token.render_token_response_from_model(response)['token']) + render_token.render_token_response_from_model(response)['token'] + ) unscoped_token = response.id response = self.get('/auth/projects', token=unscoped_token) project_ids = [p['id'] for p in response.json_body['projects']] @@ -3693,21 +3696,19 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): # Delete the member role and recreate it in a different domain PROVIDERS.role_api.delete_role(self.member_role['id']) member_role_ref = unit.new_role_ref( - name='member', - domain_id=new_domain['id'] + name='member', domain_id=new_domain['id'] ) PROVIDERS.role_api.create_role(member_role_ref['id'], member_role_ref) self.assertRaises( exception.DomainSpecificRoleNotWithinIdPDomain, - self._issue_unscoped_token + self._issue_unscoped_token, ) def test_roles_in_idp_domain_can_be_assigned_from_mapping(self): # Delete the member role and recreate it in the domain of the idp PROVIDERS.role_api.delete_role(self.member_role['id']) member_role_ref = unit.new_role_ref( - name='member', - domain_id=self.idp['domain_id'] + name='member', domain_id=self.idp['domain_id'] ) PROVIDERS.role_api.create_role(member_role_ref['id'], member_role_ref) response = self._issue_unscoped_token() @@ -3727,27 +3728,23 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): PROVIDERS.assignment_api.list_role_assignments( user_id=user_id, project_id=staging_project['id'], - strip_domain_roles=False + strip_domain_roles=False, ) ) self.assertEqual( staging_project['id'], domain_role_assignments[0]['project_id'] ) - self.assertEqual( - user_id, domain_role_assignments[0]['user_id'] - ) + 
self.assertEqual(user_id, domain_role_assignments[0]['user_id']) def test_mapping_with_groups_includes_projects_with_group_assignment(self): # create a group called Observers observer_group = unit.new_group_ref( - domain_id=self.idp['domain_id'], - name='Observers' + domain_id=self.idp['domain_id'], name='Observers' ) observer_group = PROVIDERS.identity_api.create_group(observer_group) # make sure the Observers group has a role on the finance project finance_project = unit.new_project_ref( - domain_id=self.idp['domain_id'], - name='Finance' + domain_id=self.idp['domain_id'], name='Finance' ) finance_project = PROVIDERS.resource_api.create_project( finance_project['id'], finance_project @@ -3755,15 +3752,13 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): PROVIDERS.assignment_api.create_grant( self.observer_role['id'], group_id=observer_group['id'], - project_id=finance_project['id'] + project_id=finance_project['id'], ) # update the mapping group_rule = { 'group': { 'name': 'Observers', - 'domain': { - 'id': self.idp['domain_id'] - } + 'domain': {'id': self.idp['domain_id']}, } } updated_mapping = copy.deepcopy(mapping_fixtures.MAPPING_PROJECTS) @@ -3784,7 +3779,7 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): 'Project for tbo': 'admin', # This is a result of the mapping engine maintaining its old # behavior. - 'Finance': 'observer' + 'Finance': 'observer', } for project in projects: # Ask for a scope token to each project in the mapping. 
Each token @@ -3806,7 +3801,8 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): # Authenticate once to create the projects response = self._issue_unscoped_token() self.assertValidMappedUser( - render_token.render_token_response_from_model(response)['token']) + render_token.render_token_response_from_model(response)['token'] + ) # Assign admin role to newly-created project to another user staging_project = PROVIDERS.resource_api.get_project_by_name( @@ -3815,14 +3811,16 @@ class ShadowMappingTests(test_v3.RestfulTestCase, FederatedSetupMixin): admin = unit.new_user_ref(CONF.identity.default_domain_id) PROVIDERS.identity_api.create_user(admin) PROVIDERS.assignment_api.create_grant( - self.role_admin['id'], user_id=admin['id'], - project_id=staging_project['id'] + self.role_admin['id'], + user_id=admin['id'], + project_id=staging_project['id'], ) # Authenticate again with the federated user and verify roles response = self._issue_unscoped_token() self.assertValidMappedUser( - render_token.render_token_response_from_model(response)['token']) + render_token.render_token_response_from_model(response)['token'] + ) unscoped_token = response.id scope = self._scope_request( unscoped_token, 'project', staging_project['id'] @@ -3848,9 +3846,8 @@ class JsonHomeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): def _is_xmlsec1_installed(): p = subprocess.Popen( - ['which', 'xmlsec1'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + ['which', 'xmlsec1'], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) # invert the return code return not bool(p.wait()) @@ -3863,8 +3860,10 @@ def _load_xml(filename): class SAMLGenerationTests(test_v3.RestfulTestCase): - SP_AUTH_URL = ('http://beta.com:5000/v3/OS-FEDERATION/identity_providers' - '/BETA/protocols/saml2/auth') + SP_AUTH_URL = ( + 'http://beta.com:5000/v3/OS-FEDERATION/identity_providers' + '/BETA/protocols/saml2/auth' + ) ASSERTION_FILE = 'signed_saml2_assertion.xml' @@ -3877,8 +3876,10 @@ 
class SAMLGenerationTests(test_v3.RestfulTestCase): ROLES = ['admin', 'member'] PROJECT = 'development' PROJECT_DOMAIN = 'project_domain' - GROUPS = ['JSON:{"name":"group1","domain":{"name":"Default"}}', - 'JSON:{"name":"group2","domain":{"name":"Default"}}'] + GROUPS = [ + 'JSON:{"name":"group1","domain":{"name":"Default"}}', + 'JSON:{"name":"group2","domain":{"name":"Default"}}', + ] SAML_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2' ECP_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2/ecp' ASSERTION_VERSION = "2.0" @@ -3887,13 +3888,17 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): def setUp(self): super(SAMLGenerationTests, self).setUp() self.signed_assertion = saml2.create_class_from_xml_string( - saml.Assertion, _load_xml(self.ASSERTION_FILE)) + saml.Assertion, _load_xml(self.ASSERTION_FILE) + ) self.sp = core.new_service_provider_ref( auth_url=self.SP_AUTH_URL, sp_url=self.RECIPIENT ) url = '/OS-FEDERATION/service_providers/' + self.SERVICE_PROVDIER_ID - self.put(url, body={'service_provider': self.sp}, - expected_status=http.client.CREATED) + self.put( + url, + body={'service_provider': self.sp}, + expected_status=http.client.CREATED, + ) def test_samlize_token_values(self): """Test the SAML generator produces a SAML object. @@ -3903,15 +3908,20 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): the known arguments that were passed in. 
""" - with mock.patch.object(keystone_idp, '_sign_assertion', - return_value=self.signed_assertion): + with mock.patch.object( + keystone_idp, '_sign_assertion', return_value=self.signed_assertion + ): generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token(self.ISSUER, self.RECIPIENT, - self.SUBJECT, - self.SUBJECT_DOMAIN, - self.ROLES, self.PROJECT, - self.PROJECT_DOMAIN, - self.GROUPS) + response = generator.samlize_token( + self.ISSUER, + self.RECIPIENT, + self.SUBJECT, + self.SUBJECT_DOMAIN, + self.ROLES, + self.PROJECT, + self.PROJECT_DOMAIN, + self.GROUPS, + ) assertion = response.assertion self.assertIsNotNone(assertion) @@ -3923,23 +3933,27 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): user_attribute = assertion.attribute_statement[0].attribute[0] self.assertEqual(self.SUBJECT, user_attribute.attribute_value[0].text) - user_domain_attribute = ( - assertion.attribute_statement[0].attribute[1]) - self.assertEqual(self.SUBJECT_DOMAIN, - user_domain_attribute.attribute_value[0].text) + user_domain_attribute = assertion.attribute_statement[0].attribute[1] + self.assertEqual( + self.SUBJECT_DOMAIN, user_domain_attribute.attribute_value[0].text + ) role_attribute = assertion.attribute_statement[0].attribute[2] for attribute_value in role_attribute.attribute_value: self.assertIn(attribute_value.text, self.ROLES) project_attribute = assertion.attribute_statement[0].attribute[3] - self.assertEqual(self.PROJECT, - project_attribute.attribute_value[0].text) + self.assertEqual( + self.PROJECT, project_attribute.attribute_value[0].text + ) - project_domain_attribute = ( - assertion.attribute_statement[0].attribute[4]) - self.assertEqual(self.PROJECT_DOMAIN, - project_domain_attribute.attribute_value[0].text) + project_domain_attribute = assertion.attribute_statement[0].attribute[ + 4 + ] + self.assertEqual( + self.PROJECT_DOMAIN, + project_domain_attribute.attribute_value[0].text, + ) group_attribute = 
assertion.attribute_statement[0].attribute[5] for attribute_value in group_attribute.attribute_value: @@ -3947,8 +3961,8 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): def test_comma_in_certfile_path(self): self.config_fixture.config( - group='saml', - certfile=CONF.saml.certfile + ',') + group='saml', certfile=CONF.saml.certfile + ',' + ) generator = keystone_idp.SAMLGenerator() self.assertRaises( exception.UnexpectedError, @@ -3960,12 +3974,13 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): self.ROLES, self.PROJECT, self.PROJECT_DOMAIN, - self.GROUPS) + self.GROUPS, + ) def test_comma_in_keyfile_path(self): self.config_fixture.config( - group='saml', - keyfile=CONF.saml.keyfile + ',') + group='saml', keyfile=CONF.saml.keyfile + ',' + ) generator = keystone_idp.SAMLGenerator() self.assertRaises( exception.UnexpectedError, @@ -3977,7 +3992,8 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): self.ROLES, self.PROJECT, self.PROJECT_DOMAIN, - self.GROUPS) + self.GROUPS, + ) def test_verify_assertion_object(self): """Test that the Assertion object is built properly. @@ -3986,15 +4002,20 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): _sign_assertion method is patched and doesn't alter the assertion. 
""" - with mock.patch.object(keystone_idp, '_sign_assertion', - side_effect=lambda x: x): + with mock.patch.object( + keystone_idp, '_sign_assertion', side_effect=lambda x: x + ): generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token(self.ISSUER, self.RECIPIENT, - self.SUBJECT, - self.SUBJECT_DOMAIN, - self.ROLES, self.PROJECT, - self.PROJECT_DOMAIN, - self.GROUPS) + response = generator.samlize_token( + self.ISSUER, + self.RECIPIENT, + self.SUBJECT, + self.SUBJECT_DOMAIN, + self.ROLES, + self.PROJECT, + self.PROJECT_DOMAIN, + self.GROUPS, + ) assertion = response.assertion self.assertEqual(self.ASSERTION_VERSION, assertion.version) @@ -4006,15 +4027,20 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): the known arguments that were passed in. """ - with mock.patch.object(keystone_idp, '_sign_assertion', - return_value=self.signed_assertion): + with mock.patch.object( + keystone_idp, '_sign_assertion', return_value=self.signed_assertion + ): generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token(self.ISSUER, self.RECIPIENT, - self.SUBJECT, - self.SUBJECT_DOMAIN, - self.ROLES, self.PROJECT, - self.PROJECT_DOMAIN, - self.GROUPS) + response = generator.samlize_token( + self.ISSUER, + self.RECIPIENT, + self.SUBJECT, + self.SUBJECT_DOMAIN, + self.ROLES, + self.PROJECT, + self.PROJECT_DOMAIN, + self.GROUPS, + ) saml_str = response.to_string() response = etree.fromstring(saml_str) @@ -4055,15 +4081,22 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): # the assertion as is without signing it return assertion_content - with mock.patch.object(subprocess, 'check_output', - side_effect=mocked_subprocess_check_output): + with mock.patch.object( + subprocess, + 'check_output', + side_effect=mocked_subprocess_check_output, + ): generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token(self.ISSUER, self.RECIPIENT, - self.SUBJECT, - self.SUBJECT_DOMAIN, - self.ROLES, self.PROJECT, - self.PROJECT_DOMAIN, - 
self.GROUPS) + response = generator.samlize_token( + self.ISSUER, + self.RECIPIENT, + self.SUBJECT, + self.SUBJECT_DOMAIN, + self.ROLES, + self.PROJECT, + self.PROJECT_DOMAIN, + self.GROUPS, + ) assertion_xml = response.assertion.to_string() # The expected values in the assertions bellow need to be 'str' in # Python 2 and 'bytes' in Python 3 @@ -4072,10 +4105,12 @@ class SAMLGenerationTests(test_v3.RestfulTestCase): self.assertIn(b' 0 - url = ( - '/users/%(user_id)s/OS-OAUTH1/access_tokens' - % {'user_id': self.user_id} - ) + url = '/users/%(user_id)s/OS-OAUTH1/access_tokens' % { + 'user_id': self.user_id + } resp = self.get(url) self.head(url, expected_status=http.client.OK) entities = resp.result['access_tokens'] @@ -418,9 +443,10 @@ class AccessTokenCRUDTests(OAuthFlowTests): access_token_key = self.access_token.key.decode() # Delete access_token - resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' - % {'user': self.user_id, - 'auth': access_token_key}) + resp = self.delete( + '/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' + % {'user': self.user_id, 'auth': access_token_key} + ) self.assertResponseStatus(resp, http.client.NO_CONTENT) # List access_token should be 0 @@ -435,15 +461,18 @@ class AuthTokenTests(object): def test_keystone_token_is_valid(self): self.test_oauth_flow() - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} + headers = { + 'X-Subject-Token': self.keystone_token_id, + 'X-Auth-Token': self.keystone_token_id, + } r = self.get('/auth/tokens', headers=headers) self.assertValidTokenResponse(r, self.user) # now verify the oauth section oauth_section = r.result['token']['OS-OAUTH1'] - self.assertEqual(self.access_token.key.decode(), - oauth_section['access_token_id']) + self.assertEqual( + self.access_token.key.decode(), oauth_section['access_token_id'] + ) self.assertEqual(self.consumer['key'], oauth_section['consumer_id']) # verify the roles section @@ -453,8 +482,12 @@ class 
AuthTokenTests(object): # verify that the token can perform delegated tasks ref = unit.new_user_ref(domain_id=self.domain_id) - r = self.admin_request(path='/v3/users', headers=headers, - method='POST', body={'user': ref}) + r = self.admin_request( + path='/v3/users', + headers=headers, + method='POST', + body={'user': ref}, + ) self.assertValidUserResponse(r, ref) def test_delete_access_token_also_revokes_token(self): @@ -462,70 +495,99 @@ class AuthTokenTests(object): access_token_key = self.access_token.key.decode() # Delete access token - resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' - % {'user': self.user_id, - 'auth': access_token_key}) + resp = self.delete( + '/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' + % {'user': self.user_id, 'auth': access_token_key} + ) self.assertResponseStatus(resp, http.client.NO_CONTENT) # Check Keystone Token no longer exists - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} - self.get('/auth/tokens', headers=headers, - expected_status=http.client.NOT_FOUND) + headers = { + 'X-Subject-Token': self.keystone_token_id, + 'X-Auth-Token': self.keystone_token_id, + } + self.get( + '/auth/tokens', + headers=headers, + expected_status=http.client.NOT_FOUND, + ) def test_deleting_consumer_also_deletes_tokens(self): self.test_oauth_flow() # Delete consumer consumer_id = self.consumer['key'] - resp = self.delete('/OS-OAUTH1/consumers/%(consumer_id)s' - % {'consumer_id': consumer_id}) + resp = self.delete( + '/OS-OAUTH1/consumers/%(consumer_id)s' + % {'consumer_id': consumer_id} + ) self.assertResponseStatus(resp, http.client.NO_CONTENT) # List access_token should be 0 - resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens' - % {'user_id': self.user_id}) + resp = self.get( + '/users/%(user_id)s/OS-OAUTH1/access_tokens' + % {'user_id': self.user_id} + ) entities = resp.result['access_tokens'] self.assertEqual([], entities) # Check Keystone Token no longer 
exists - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} - self.head('/auth/tokens', headers=headers, - expected_status=http.client.NOT_FOUND) + headers = { + 'X-Subject-Token': self.keystone_token_id, + 'X-Auth-Token': self.keystone_token_id, + } + self.head( + '/auth/tokens', + headers=headers, + expected_status=http.client.NOT_FOUND, + ) def test_change_user_password_also_deletes_tokens(self): self.test_oauth_flow() # delegated keystone token exists - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} + headers = { + 'X-Subject-Token': self.keystone_token_id, + 'X-Auth-Token': self.keystone_token_id, + } r = self.get('/auth/tokens', headers=headers) self.assertValidTokenResponse(r, self.user) user = {'password': uuid.uuid4().hex} - r = self.patch('/users/%(user_id)s' % { - 'user_id': self.user['id']}, - body={'user': user}) + r = self.patch( + '/users/%(user_id)s' % {'user_id': self.user['id']}, + body={'user': user}, + ) headers = {'X-Subject-Token': self.keystone_token_id} - self.get(path='/auth/tokens', token=self.get_admin_token(), - headers=headers, expected_status=http.client.NOT_FOUND) + self.get( + path='/auth/tokens', + token=self.get_admin_token(), + headers=headers, + expected_status=http.client.NOT_FOUND, + ) def test_deleting_project_also_invalidates_tokens(self): self.test_oauth_flow() # delegated keystone token exists - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} + headers = { + 'X-Subject-Token': self.keystone_token_id, + 'X-Auth-Token': self.keystone_token_id, + } r = self.get('/auth/tokens', headers=headers) self.assertValidTokenResponse(r, self.user) - r = self.delete('/projects/%(project_id)s' % { - 'project_id': self.project_id}) + r = self.delete( + '/projects/%(project_id)s' % {'project_id': self.project_id} + ) headers = {'X-Subject-Token': self.keystone_token_id} - self.get(path='/auth/tokens', 
token=self.get_admin_token(), - headers=headers, expected_status=http.client.NOT_FOUND) + self.get( + path='/auth/tokens', + token=self.get_admin_token(), + headers=headers, + expected_status=http.client.NOT_FOUND, + ) def test_token_chaining_is_not_allowed(self): self.test_oauth_flow() @@ -533,26 +595,30 @@ class AuthTokenTests(object): # attempt to re-authenticate (token chain) with the given token path = '/v3/auth/tokens/' auth_data = self.build_authentication_request( - token=self.keystone_token_id) + token=self.keystone_token_id + ) self.admin_request( path=path, body=auth_data, token=self.keystone_token_id, method='POST', - expected_status=http.client.FORBIDDEN) + expected_status=http.client.FORBIDDEN, + ) def test_delete_keystone_tokens_by_consumer_id(self): self.test_oauth_flow() PROVIDERS.token_provider_api._persistence.get_token( - self.keystone_token_id) + self.keystone_token_id + ) PROVIDERS.token_provider_api._persistence.delete_tokens( - self.user_id, - consumer_id=self.consumer['key']) + self.user_id, consumer_id=self.consumer['key'] + ) self.assertRaises( exception.TokenNotFound, PROVIDERS.token_provider_api._persistence.get_token, - self.keystone_token_id) + self.keystone_token_id, + ) def _create_trust_get_token(self): ref = unit.new_trust_ref( @@ -561,7 +627,8 @@ class AuthTokenTests(object): project_id=self.project_id, impersonation=True, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) del ref['id'] r = self.post('/OS-TRUST/trusts', body={'trust': ref}) @@ -570,7 +637,8 @@ class AuthTokenTests(object): auth_data = self.build_authentication_request( user_id=self.user['id'], password=self.user['password'], - trust_id=trust['id']) + trust_id=trust['id'], + ) return self.get_requested_token(auth_data) @@ -581,11 +649,14 @@ class AuthTokenTests(object): self.consumer = {'key': consumer_id, 'secret': consumer_secret} self.assertIsNotNone(self.consumer['secret']) - url, headers = 
self._create_request_token(self.consumer, - self.project_id) + url, headers = self._create_request_token( + self.consumer, self.project_id + ) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] request_secret = credentials['oauth_token_secret'][0] @@ -604,35 +675,50 @@ class AuthTokenTests(object): project_id=self.project_id, impersonation=True, expires=dict(minutes=1), - role_ids=[self.role_id]) + role_ids=[self.role_id], + ) del ref['id'] - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=self.keystone_token_id, - expected_status=http.client.FORBIDDEN) + self.post( + '/OS-TRUST/trusts', + body={'trust': ref}, + token=self.keystone_token_id, + expected_status=http.client.FORBIDDEN, + ) def test_oauth_token_cannot_authorize_request_token(self): self.test_oauth_flow() url = self._approve_request_token_url() body = {'roles': [{'id': self.role_id}]} - self.put(url, body=body, token=self.keystone_token_id, - expected_status=http.client.FORBIDDEN) + self.put( + url, + body=body, + token=self.keystone_token_id, + expected_status=http.client.FORBIDDEN, + ) def test_oauth_token_cannot_list_request_tokens(self): - self._set_policy({"identity:list_access_tokens": [], - "identity:create_consumer": [], - "identity:authorize_request_token": []}) + self._set_policy( + { + "identity:list_access_tokens": [], + "identity:create_consumer": [], + "identity:authorize_request_token": [], + } + ) self.test_oauth_flow() url = '/users/%s/OS-OAUTH1/access_tokens' % self.user_id - self.get(url, token=self.keystone_token_id, - expected_status=http.client.FORBIDDEN) + self.get( + url, + token=self.keystone_token_id, + expected_status=http.client.FORBIDDEN, + ) def _set_policy(self, new_policy): self.tempfile = 
self.useFixture(temporaryfile.SecureTempFile()) self.tmpfilename = self.tempfile.file_name - self.config_fixture.config(group='oslo_policy', - policy_file=self.tmpfilename) + self.config_fixture.config( + group='oslo_policy', policy_file=self.tmpfilename + ) with open(self.tmpfilename, "w") as policyfile: policyfile.write(jsonutils.dumps(new_policy)) @@ -640,16 +726,20 @@ class AuthTokenTests(object): trust_token = self._create_trust_get_token() url = self._approve_request_token_url() body = {'roles': [{'id': self.role_id}]} - self.put(url, body=body, token=trust_token, - expected_status=http.client.FORBIDDEN) + self.put( + url, + body=body, + token=trust_token, + expected_status=http.client.FORBIDDEN, + ) def test_trust_token_cannot_list_request_tokens(self): - self._set_policy({"identity:list_access_tokens": [], - "identity:create_trust": []}) + self._set_policy( + {"identity:list_access_tokens": [], "identity:create_trust": []} + ) trust_token = self._create_trust_get_token() url = '/users/%s/OS-OAUTH1/access_tokens' % self.user_id - self.get(url, token=trust_token, - expected_status=http.client.FORBIDDEN) + self.get(url, token=trust_token, expected_status=http.client.FORBIDDEN) class FernetAuthTokenTests(AuthTokenTests, OAuthFlowTests): @@ -661,7 +751,7 @@ class FernetAuthTokenTests(AuthTokenTests, OAuthFlowTests): ksfixtures.KeyRepository( self.config_fixture, 'fernet_tokens', - CONF.fernet_tokens.max_active_keys + CONF.fernet_tokens.max_active_keys, ) ) @@ -683,8 +773,9 @@ class MaliciousOAuth1Tests(OAuth1Tests): consumer_id = consumer['id'] consumer = {'key': consumer_id, 'secret': uuid.uuid4().hex} url, headers = self._create_request_token(consumer, self.project_id) - self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) def test_bad_request_url(self): consumer = self._create_single_consumer() @@ -692,10 +783,12 @@ class MaliciousOAuth1Tests(OAuth1Tests): 
consumer_secret = consumer['secret'] consumer = {'key': consumer_id, 'secret': consumer_secret} bad_base_url = 'http://localhost/identity_admin/v3' - url, headers = self._create_request_token(consumer, self.project_id, - base_url=bad_base_url) - self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + url, headers = self._create_request_token( + consumer, self.project_id, base_url=bad_base_url + ) + self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) def test_bad_request_url_scheme(self): consumer = self._create_single_consumer() @@ -703,10 +796,12 @@ class MaliciousOAuth1Tests(OAuth1Tests): consumer_secret = consumer['secret'] consumer = {'key': consumer_id, 'secret': consumer_secret} bad_url_scheme = self._switch_baseurl_scheme() - url, headers = self._create_request_token(consumer, self.project_id, - base_url=bad_url_scheme) - self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + url, headers = self._create_request_token( + consumer, self.project_id, base_url=bad_url_scheme + ) + self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) def test_bad_request_token_key(self): consumer = self._create_single_consumer() @@ -715,8 +810,10 @@ class MaliciousOAuth1Tests(OAuth1Tests): consumer = {'key': consumer_id, 'secret': consumer_secret} url, headers = self._create_request_token(consumer, self.project_id) self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) url = self._authorize_request_token(uuid.uuid4().hex) body = {'roles': [{'id': self.role_id}]} self.put(url, body=body, expected_status=http.client.NOT_FOUND) @@ -728,8 +825,10 @@ class MaliciousOAuth1Tests(OAuth1Tests): consumer = {'key': consumer_id, 'secret': consumer_secret} url, headers = self._create_request_token(consumer, self.project_id) content = self.post( - url, 
headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] url = self._authorize_request_token(request_key) @@ -762,8 +861,10 @@ class MaliciousOAuth1Tests(OAuth1Tests): url, headers = self._create_request_token(consumer, self.project_id) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] request_secret = credentials['oauth_token_secret'][0] @@ -777,11 +878,14 @@ class MaliciousOAuth1Tests(OAuth1Tests): request_token.set_verifier(uuid.uuid4().hex) url, headers = self._create_access_token(consumer, request_token) - resp = self.post(url, headers=headers, - expected_status=http.client.BAD_REQUEST) + resp = self.post( + url, headers=headers, expected_status=http.client.BAD_REQUEST + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Validation failed with errors', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Validation failed with errors', + resp_data.get('error', {}).get('message'), + ) def test_validate_access_token_request_failed(self): self.config_fixture.config(debug=True, insecure_debug=True) @@ -792,8 +896,10 @@ class MaliciousOAuth1Tests(OAuth1Tests): url, headers = self._create_request_token(consumer, self.project_id) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] request_secret = credentials['oauth_token_secret'][0] @@ -808,68 +914,84 @@ 
class MaliciousOAuth1Tests(OAuth1Tests): # 1. Invalid base url. # Update the base url, so it will fail to validate the signature. base_url = 'http://localhost/identity_admin/v3' - url, headers = self._create_access_token(consumer, request_token, - base_url=base_url) - resp = self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + url, headers = self._create_access_token( + consumer, request_token, base_url=base_url + ) + resp = self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Invalid signature', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Invalid signature', resp_data.get('error', {}).get('message') + ) # 2. Invalid base url scheme. # Update the base url scheme, so it will fail to validate signature. bad_url_scheme = self._switch_baseurl_scheme() - url, headers = self._create_access_token(consumer, request_token, - base_url=bad_url_scheme) - resp = self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + url, headers = self._create_access_token( + consumer, request_token, base_url=bad_url_scheme + ) + resp = self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Invalid signature', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Invalid signature', resp_data.get('error', {}).get('message') + ) # 3. Invalid signature. # Update the secret, so it will fail to validate the signature. 
consumer.update({'secret': uuid.uuid4().hex}) url, headers = self._create_access_token(consumer, request_token) - resp = self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + resp = self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Invalid signature', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Invalid signature', resp_data.get('error', {}).get('message') + ) # 4. Invalid verifier. # Even though the verifier is well formatted, it is not verifier # that is stored in the backend, this is different with the testcase # above `test_bad_verifier` where it test that `verifier` is not # well formatted. - verifier = ''.join(random.SystemRandom().sample(base.VERIFIER_CHARS, - 8)) + verifier = ''.join( + random.SystemRandom().sample(base.VERIFIER_CHARS, 8) + ) request_token.set_verifier(verifier) url, headers = self._create_access_token(consumer, request_token) - resp = self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + resp = self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Provided verifier', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Provided verifier', resp_data.get('error', {}).get('message') + ) # 5. The provided consumer does not exist. consumer.update({'key': uuid.uuid4().hex}) url, headers = self._create_access_token(consumer, request_token) - resp = self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + resp = self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Provided consumer does not exist', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Provided consumer does not exist', + resp_data.get('error', {}).get('message'), + ) # 6. 
The consumer key provided does not match stored consumer key. consumer2 = self._create_single_consumer() consumer.update({'key': consumer2['id']}) url, headers = self._create_access_token(consumer, request_token) - resp = self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + resp = self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Provided consumer key', - resp_data.get('error', {}).get('message')) + self.assertIn( + 'Provided consumer key', resp_data.get('error', {}).get('message') + ) def test_bad_authorizing_roles_id(self): consumer = self._create_single_consumer() @@ -885,17 +1007,21 @@ class MaliciousOAuth1Tests(OAuth1Tests): PROVIDERS.assignment_api.add_role_to_user_and_project( user_id=self.user_id, project_id=self.project_id, - role_id=new_role['id']) + role_id=new_role['id'], + ) url, headers = self._create_request_token(consumer, self.project_id) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] PROVIDERS.assignment_api.remove_role_from_user_and_project( - self.user_id, self.project_id, new_role['id']) + self.user_id, self.project_id, new_role['id'] + ) url = self._authorize_request_token(request_key) body = {'roles': [{'id': new_role['id']}]} # NOTE(morgan): previous versions of this test erroneously checked for @@ -912,8 +1038,10 @@ class MaliciousOAuth1Tests(OAuth1Tests): url, headers = self._create_request_token(consumer, self.project_id) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key 
= credentials['oauth_token'][0] @@ -929,8 +1057,10 @@ class MaliciousOAuth1Tests(OAuth1Tests): url, headers = self._create_request_token(consumer, self.project_id) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] request_secret = credentials['oauth_token_secret'][0] @@ -942,18 +1072,21 @@ class MaliciousOAuth1Tests(OAuth1Tests): verifier = resp.result['token']['oauth_verifier'] request_token.set_verifier(verifier) request_token_created = PROVIDERS.oauth_api.get_request_token( - request_key.decode('utf-8')) + request_key.decode('utf-8') + ) request_token_created.update({'authorizing_user_id': ''}) # Update the request token that is created instead of mocking # the whole token object to focus on what's we want to test # here and avoid any other factors that will result in the same # exception. 
- with mock.patch.object(PROVIDERS.oauth_api, - 'get_request_token') as mock_token: + with mock.patch.object( + PROVIDERS.oauth_api, 'get_request_token' + ) as mock_token: mock_token.return_value = request_token_created url, headers = self._create_access_token(consumer, request_token) - self.post(url, headers=headers, - expected_status=http.client.UNAUTHORIZED) + self.post( + url, headers=headers, expected_status=http.client.UNAUTHORIZED + ) def test_validate_requet_token_request_failed(self): self.config_fixture.config(debug=True, insecure_debug=True) @@ -963,22 +1096,27 @@ class MaliciousOAuth1Tests(OAuth1Tests): consumer = {'key': consumer_id, 'secret': consumer_secret} url = '/OS-OAUTH1/request_token' - auth_header = ('OAuth oauth_version="1.0", oauth_consumer_key=' + - consumer_id) - faked_header = {'Authorization': auth_header, - 'requested_project_id': self.project_id} + auth_header = ( + 'OAuth oauth_version="1.0", oauth_consumer_key=' + consumer_id + ) + faked_header = { + 'Authorization': auth_header, + 'requested_project_id': self.project_id, + } resp = self.post( - url, headers=faked_header, - expected_status=http.client.BAD_REQUEST) + url, headers=faked_header, expected_status=http.client.BAD_REQUEST + ) resp_data = jsonutils.loads(resp.body) - self.assertIn('Validation failed with errors', - resp_data['error']['message']) + self.assertIn( + 'Validation failed with errors', resp_data['error']['message'] + ) def test_expired_authorizing_request_token(self): with freezegun.freeze_time(datetime.datetime.utcnow()) as frozen_time: - self.config_fixture.config(group='oauth1', - request_token_duration=1) + self.config_fixture.config( + group='oauth1', request_token_duration=1 + ) consumer = self._create_single_consumer() consumer_id = consumer['id'] @@ -986,11 +1124,14 @@ class MaliciousOAuth1Tests(OAuth1Tests): self.consumer = {'key': consumer_id, 'secret': consumer_secret} self.assertIsNotNone(self.consumer['key']) - url, headers = 
self._create_request_token(self.consumer, - self.project_id) + url, headers = self._create_request_token( + self.consumer, self.project_id + ) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] request_secret = credentials['oauth_token_secret'][0] @@ -999,14 +1140,16 @@ class MaliciousOAuth1Tests(OAuth1Tests): url = self._authorize_request_token(request_key) body = {'roles': [{'id': self.role_id}]} - frozen_time.tick(delta=datetime.timedelta( - seconds=CONF.oauth1.request_token_duration + 1)) + frozen_time.tick( + delta=datetime.timedelta( + seconds=CONF.oauth1.request_token_duration + 1 + ) + ) self.put(url, body=body, expected_status=http.client.UNAUTHORIZED) def test_expired_creating_keystone_token(self): with freezegun.freeze_time(datetime.datetime.utcnow()) as frozen_time: - self.config_fixture.config(group='oauth1', - access_token_duration=1) + self.config_fixture.config(group='oauth1', access_token_duration=1) consumer = self._create_single_consumer() consumer_id = consumer['id'] @@ -1014,11 +1157,14 @@ class MaliciousOAuth1Tests(OAuth1Tests): self.consumer = {'key': consumer_id, 'secret': consumer_secret} self.assertIsNotNone(self.consumer['key']) - url, headers = self._create_request_token(self.consumer, - self.project_id) + url, headers = self._create_request_token( + self.consumer, self.project_id + ) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] request_secret = credentials['oauth_token_secret'][0] @@ -1031,77 +1177,106 @@ class MaliciousOAuth1Tests(OAuth1Tests): 
self.verifier = resp.result['token']['oauth_verifier'] self.request_token.set_verifier(self.verifier) - url, headers = self._create_access_token(self.consumer, - self.request_token) + url, headers = self._create_access_token( + self.consumer, self.request_token + ) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) access_key = credentials['oauth_token'][0] access_secret = credentials['oauth_token_secret'][0] self.access_token = oauth1.Token(access_key, access_secret) self.assertIsNotNone(self.access_token.key) - url, headers, body = self._get_oauth_token(self.consumer, - self.access_token) - frozen_time.tick(delta=datetime.timedelta( - seconds=CONF.oauth1.access_token_duration + 1)) - self.post(url, headers=headers, body=body, - expected_status=http.client.UNAUTHORIZED) + url, headers, body = self._get_oauth_token( + self.consumer, self.access_token + ) + frozen_time.tick( + delta=datetime.timedelta( + seconds=CONF.oauth1.access_token_duration + 1 + ) + ) + self.post( + url, + headers=headers, + body=body, + expected_status=http.client.UNAUTHORIZED, + ) def test_missing_oauth_headers(self): endpoint = '/OS-OAUTH1/request_token' - client = oauth1.Client(uuid.uuid4().hex, - client_secret=uuid.uuid4().hex, - signature_method=oauth1.SIG_HMAC, - callback_uri="oob") + client = oauth1.Client( + uuid.uuid4().hex, + client_secret=uuid.uuid4().hex, + signature_method=oauth1.SIG_HMAC, + callback_uri="oob", + ) headers = {'requested_project_id': uuid.uuid4().hex} - _url, headers, _body = client.sign(self.base_url + endpoint, - http_method='POST', - headers=headers) + _url, headers, _body = client.sign( + self.base_url + endpoint, http_method='POST', headers=headers + ) # NOTE(stevemar): To simulate this error, we remove the Authorization # header from the post request. 
del headers['Authorization'] - self.post(endpoint, headers=headers, - expected_status=http.client.INTERNAL_SERVER_ERROR) + self.post( + endpoint, + headers=headers, + expected_status=http.client.INTERNAL_SERVER_ERROR, + ) -class OAuthNotificationTests(OAuth1Tests, - test_notifications.BaseNotificationTest): +class OAuthNotificationTests( + OAuth1Tests, test_notifications.BaseNotificationTest +): def test_create_consumer(self): consumer_ref = self._create_single_consumer() - self._assert_notify_sent(consumer_ref['id'], - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:consumer') - self._assert_last_audit(consumer_ref['id'], - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:consumer', - cadftaxonomy.SECURITY_ACCOUNT) + self._assert_notify_sent( + consumer_ref['id'], + test_notifications.CREATED_OPERATION, + 'OS-OAUTH1:consumer', + ) + self._assert_last_audit( + consumer_ref['id'], + test_notifications.CREATED_OPERATION, + 'OS-OAUTH1:consumer', + cadftaxonomy.SECURITY_ACCOUNT, + ) def test_update_consumer(self): consumer_ref = self._create_single_consumer() update_ref = {'consumer': {'description': uuid.uuid4().hex}} PROVIDERS.oauth_api.update_consumer(consumer_ref['id'], update_ref) - self._assert_notify_sent(consumer_ref['id'], - test_notifications.UPDATED_OPERATION, - 'OS-OAUTH1:consumer') - self._assert_last_audit(consumer_ref['id'], - test_notifications.UPDATED_OPERATION, - 'OS-OAUTH1:consumer', - cadftaxonomy.SECURITY_ACCOUNT) + self._assert_notify_sent( + consumer_ref['id'], + test_notifications.UPDATED_OPERATION, + 'OS-OAUTH1:consumer', + ) + self._assert_last_audit( + consumer_ref['id'], + test_notifications.UPDATED_OPERATION, + 'OS-OAUTH1:consumer', + cadftaxonomy.SECURITY_ACCOUNT, + ) def test_delete_consumer(self): consumer_ref = self._create_single_consumer() PROVIDERS.oauth_api.delete_consumer(consumer_ref['id']) - self._assert_notify_sent(consumer_ref['id'], - test_notifications.DELETED_OPERATION, - 'OS-OAUTH1:consumer') - 
self._assert_last_audit(consumer_ref['id'], - test_notifications.DELETED_OPERATION, - 'OS-OAUTH1:consumer', - cadftaxonomy.SECURITY_ACCOUNT) + self._assert_notify_sent( + consumer_ref['id'], + test_notifications.DELETED_OPERATION, + 'OS-OAUTH1:consumer', + ) + self._assert_last_audit( + consumer_ref['id'], + test_notifications.DELETED_OPERATION, + 'OS-OAUTH1:consumer', + cadftaxonomy.SECURITY_ACCOUNT, + ) def test_oauth_flow_notifications(self): """Test to ensure notifications are sent for oauth tokens. @@ -1117,11 +1292,14 @@ class OAuthNotificationTests(OAuth1Tests, self.consumer = {'key': consumer_id, 'secret': consumer_secret} self.assertIsNotNone(self.consumer['secret']) - url, headers = self._create_request_token(self.consumer, - self.project_id) + url, headers = self._create_request_token( + self.consumer, self.project_id + ) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) request_key = credentials['oauth_token'][0] request_secret = credentials['oauth_token_secret'][0] @@ -1130,13 +1308,17 @@ class OAuthNotificationTests(OAuth1Tests, request_key_string = request_key.decode() # Test to ensure the create request token notification is sent - self._assert_notify_sent(request_key_string, - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:request_token') - self._assert_last_audit(request_key_string, - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:request_token', - cadftaxonomy.SECURITY_CREDENTIAL) + self._assert_notify_sent( + request_key_string, + test_notifications.CREATED_OPERATION, + 'OS-OAUTH1:request_token', + ) + self._assert_last_audit( + request_key_string, + test_notifications.CREATED_OPERATION, + 'OS-OAUTH1:request_token', + cadftaxonomy.SECURITY_CREDENTIAL, + ) url = self._authorize_request_token(request_key) body = {'roles': [{'id': 
self.role_id}]} @@ -1146,11 +1328,14 @@ class OAuthNotificationTests(OAuth1Tests, self.assertEqual(8, len(self.verifier)) self.request_token.set_verifier(self.verifier) - url, headers = self._create_access_token(self.consumer, - self.request_token) + url, headers = self._create_access_token( + self.consumer, self.request_token + ) content = self.post( - url, headers=headers, - response_content_type='application/x-www-form-urlencoded') + url, + headers=headers, + response_content_type='application/x-www-form-urlencoded', + ) credentials = _urllib_parse_qs_text_keys(content.result) access_key = credentials['oauth_token'][0] access_secret = credentials['oauth_token_secret'][0] @@ -1159,27 +1344,36 @@ class OAuthNotificationTests(OAuth1Tests, access_key_string = access_key.decode() # Test to ensure the create access token notification is sent - self._assert_notify_sent(access_key_string, - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:access_token') - self._assert_last_audit(access_key_string, - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:access_token', - cadftaxonomy.SECURITY_CREDENTIAL) + self._assert_notify_sent( + access_key_string, + test_notifications.CREATED_OPERATION, + 'OS-OAUTH1:access_token', + ) + self._assert_last_audit( + access_key_string, + test_notifications.CREATED_OPERATION, + 'OS-OAUTH1:access_token', + cadftaxonomy.SECURITY_CREDENTIAL, + ) - resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' - % {'user': self.user_id, - 'auth': self.access_token.key.decode()}) + resp = self.delete( + '/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' + % {'user': self.user_id, 'auth': self.access_token.key.decode()} + ) self.assertResponseStatus(resp, http.client.NO_CONTENT) # Test to ensure the delete access token notification is sent - self._assert_notify_sent(access_key_string, - test_notifications.DELETED_OPERATION, - 'OS-OAUTH1:access_token') - self._assert_last_audit(access_key_string, - test_notifications.DELETED_OPERATION, - 
'OS-OAUTH1:access_token', - cadftaxonomy.SECURITY_CREDENTIAL) + self._assert_notify_sent( + access_key_string, + test_notifications.DELETED_OPERATION, + 'OS-OAUTH1:access_token', + ) + self._assert_last_audit( + access_key_string, + test_notifications.DELETED_OPERATION, + 'OS-OAUTH1:access_token', + cadftaxonomy.SECURITY_CREDENTIAL, + ) class OAuthCADFNotificationTests(OAuthNotificationTests): diff --git a/keystone/tests/unit/test_v3_oauth2.py b/keystone/tests/unit/test_v3_oauth2.py index 6eaa8560f9..b5f2e33bda 100644 --- a/keystone/tests/unit/test_v3_oauth2.py +++ b/keystone/tests/unit/test_v3_oauth2.py @@ -52,26 +52,21 @@ class OAuth2AuthnMethodsTests(test_v3.OAuth2RestfulTestCase): ) def _get_access_token( - self, - headers, - data, - expected_status, - client_cert_content=None): + self, headers, data, expected_status, client_cert_content=None + ): data = parse.urlencode(data).encode() kwargs = { 'headers': headers, 'noauth': True, 'convert': False, 'body': data, - 'expected_status': expected_status + 'expected_status': expected_status, } if client_cert_content: - kwargs.update({'environ': { - 'SSL_CLIENT_CERT': client_cert_content - }}) - resp = self.post( - self.ACCESS_TOKEN_URL, - **kwargs) + kwargs.update( + {'environ': {'SSL_CLIENT_CERT': client_cert_content}} + ) + resp = self.post(self.ACCESS_TOKEN_URL, **kwargs) return resp def _create_certificates(self): @@ -80,7 +75,7 @@ class OAuth2AuthnMethodsTests(test_v3.OAuth2RestfulTestCase): country_name='jp', state_or_province_name='tokyo', locality_name='musashino', - organizational_unit_name='test' + organizational_unit_name='test', ) ) @@ -92,22 +87,21 @@ class OAuth2AuthnMethodsTests(test_v3.OAuth2RestfulTestCase): """client_secret_basic is used if a client sercret is found.""" client_id = 'client_id' client_secret = 'client_secret' - b64str = b64encode( - f'{client_id}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id}:{client_secret}'.encode()).decode().strip() + ) headers 
= { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' - } - data = { - 'grant_type': 'client_credentials' + 'Authorization': f'Basic {b64str}', } + data = {'grant_type': 'client_credentials'} _ = self._get_access_token( - headers=headers, - data=data, - expected_status=client.OK) + headers=headers, data=data, expected_status=client.OK + ) mock_client_secret_basic.assert_called_once_with( - client_id, client_secret) + client_id, client_secret + ) @mock.patch.object(AccessTokenResource, '_client_secret_basic') def test_secret_basic_form(self, mock_client_secret_basic): @@ -120,15 +114,15 @@ class OAuth2AuthnMethodsTests(test_v3.OAuth2RestfulTestCase): data = { 'grant_type': 'client_credentials', 'client_id': client_id, - 'client_secret': client_secret + 'client_secret': client_secret, } _ = self._get_access_token( - headers=headers, - data=data, - expected_status=client.OK) + headers=headers, data=data, expected_status=client.OK + ) mock_client_secret_basic.assert_called_once_with( - client_id, client_secret) + client_id, client_secret + ) @mock.patch.object(AccessTokenResource, '_client_secret_basic') def test_secret_basic_header_and_form(self, mock_client_secret_basic): @@ -137,24 +131,27 @@ class OAuth2AuthnMethodsTests(test_v3.OAuth2RestfulTestCase): client_secret_h = 'client_secret_h' client_id_d = 'client_id_d' client_secret_d = 'client_secret_d' - b64str = b64encode( - f'{client_id_h}:{client_secret_h}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id_h}:{client_secret_h}'.encode()) + .decode() + .strip() + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' + 'Authorization': f'Basic {b64str}', } data = { 'grant_type': 'client_credentials', 'client_id': client_id_d, - 'client_secret': client_secret_d + 'client_secret': client_secret_d, } _ = self._get_access_token( - headers=headers, - data=data, - expected_status=client.OK) + headers=headers, 
data=data, expected_status=client.OK + ) mock_client_secret_basic.assert_called_once_with( - client_id_h, client_secret_h) + client_id_h, client_secret_h + ) @mock.patch.object(AccessTokenResource, '_tls_client_auth') def test_client_cert(self, mock_tls_client_auth): @@ -165,15 +162,13 @@ class OAuth2AuthnMethodsTests(test_v3.OAuth2RestfulTestCase): headers = { 'Content-Type': 'application/x-www-form-urlencoded', } - data = { - 'grant_type': 'client_credentials', - 'client_id': client_id - } + data = {'grant_type': 'client_credentials', 'client_id': client_id} _ = self._get_access_token( headers=headers, data=data, expected_status=client.OK, - client_cert_content=cert_content) + client_cert_content=cert_content, + ) mock_tls_client_auth.assert_called_once_with(client_id, cert_content) @mock.patch.object(AccessTokenResource, '_tls_client_auth') @@ -184,11 +179,14 @@ class OAuth2AuthnMethodsTests(test_v3.OAuth2RestfulTestCase): client_id_c = 'client_id_c' client_cert, _ = self._create_certificates() cert_content = self._get_cert_content(client_cert) - b64str = b64encode( - f'{client_id_s}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id_s}:{client_secret}'.encode()) + .decode() + .strip() + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' + 'Authorization': f'Basic {b64str}', } data = { 'grant_type': 'client_credentials', @@ -199,29 +197,33 @@ class OAuth2AuthnMethodsTests(test_v3.OAuth2RestfulTestCase): headers=headers, data=data, expected_status=client.OK, - client_cert_content=cert_content) + client_cert_content=cert_content, + ) mock_tls_client_auth.assert_called_once_with(client_id_c, cert_content) class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): APP_CRED_CREATE_URL = '/users/%(user_id)s/application_credentials' APP_CRED_LIST_URL = '/users/%(user_id)s/application_credentials' - APP_CRED_DELETE_URL = '/users/%(user_id)s/application_credentials/' \ - 
'%(app_cred_id)s' - APP_CRED_SHOW_URL = '/users/%(user_id)s/application_credentials/' \ - '%(app_cred_id)s' + APP_CRED_DELETE_URL = ( + '/users/%(user_id)s/application_credentials/' '%(app_cred_id)s' + ) + APP_CRED_SHOW_URL = ( + '/users/%(user_id)s/application_credentials/' '%(app_cred_id)s' + ) ACCESS_TOKEN_URL = '/OS-OAUTH2/token' def setUp(self): super(OAuth2SecretBasicTests, self).setUp() log.set_defaults( logging_context_format_string='%(asctime)s.%(msecs)03d %(' - 'color)s%(levelname)s %(name)s [^[[' - '01;36m%(request_id)s ^[[00;36m%(' - 'project_name)s %(user_name)s%(' - 'color)s] ^[[01;35m%(instance)s%(' - 'color)s%(message)s^[[00m', - default_log_levels=log.DEBUG) + 'color)s%(levelname)s %(name)s [^[[' + '01;36m%(request_id)s ^[[00;36m%(' + 'project_name)s %(user_name)s%(' + 'color)s] ^[[01;35m%(instance)s%(' + 'color)s%(message)s^[[00m', + default_log_levels=log.DEBUG, + ) CONF.log_opt_values(LOG, log.DEBUG) LOG.debug(f'is_debug_enabled: {log.is_debug_enabled(CONF)}') LOG.debug(f'get_default_log_levels: {log.get_default_log_levels()}') @@ -231,19 +233,18 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): ) def _assert_error_resp(self, error_resp, error_msg, error_description): - resp_keys = ( - 'error', 'error_description' - ) + resp_keys = ('error', 'error_description') for key in resp_keys: self.assertIsNotNone(error_resp.get(key, None)) self.assertEqual(error_msg, error_resp.get('error')) - self.assertEqual(error_description, - error_resp.get('error_description')) + self.assertEqual( + error_description, error_resp.get('error_description') + ) def _create_app_cred(self, user_id, app_cred_name): resp = self.post( self.APP_CRED_CREATE_URL % {'user_id': user_id}, - body={'application_credential': {'name': app_cred_name}} + body={'application_credential': {'name': app_cred_name}}, ) LOG.debug(f'resp: {resp}') app_ref = resp.result['application_credential'] @@ -251,55 +252,58 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): def 
_delete_app_cred(self, user_id, app_cred_id): resp = self.delete( - self.APP_CRED_CREATE_URL % {'user_id': user_id, - 'app_cred_id': app_cred_id}) + self.APP_CRED_CREATE_URL + % {'user_id': user_id, 'app_cred_id': app_cred_id} + ) LOG.debug(f'resp: {resp}') - def _get_access_token(self, app_cred, b64str, headers, data, - expected_status): + def _get_access_token( + self, app_cred, b64str, headers, data, expected_status + ): if b64str is None: client_id = app_cred.get('id') client_secret = app_cred.get('secret') - b64str = b64encode( - f'{client_id}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id}:{client_secret}'.encode()) + .decode() + .strip() + ) if headers is None: headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' + 'Authorization': f'Basic {b64str}', } if data is None: - data = { - 'grant_type': 'client_credentials' - } + data = {'grant_type': 'client_credentials'} data = parse.urlencode(data).encode() resp = self.post( self.ACCESS_TOKEN_URL, headers=headers, convert=False, body=data, - expected_status=expected_status) + expected_status=expected_status, + ) return resp - def _get_access_token_method_not_allowed(self, app_cred, - http_func): + def _get_access_token_method_not_allowed(self, app_cred, http_func): client_id = app_cred.get('id') client_secret = app_cred.get('secret') - b64str = b64encode( - f'{client_id}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id}:{client_secret}'.encode()).decode().strip() + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' - } - data = { - 'grant_type': 'client_credentials' + 'Authorization': f'Basic {b64str}', } + data = {'grant_type': 'client_credentials'} data = parse.urlencode(data).encode() resp = http_func( self.ACCESS_TOKEN_URL, headers=headers, convert=False, body=data, - expected_status=client.METHOD_NOT_ALLOWED) + 
expected_status=client.METHOD_NOT_ALLOWED, + ) LOG.debug(f'response: {resp}') json_resp = jsonutils.loads(resp.body) return json_resp @@ -313,7 +317,8 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): b64str=None, headers=None, data=None, - expected_status=client.OK) + expected_status=client.OK, + ) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) self.assertEqual('Bearer', json_resp['token_type']) @@ -336,7 +341,8 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): b64str=None, headers=headers, data=data, - expected_status=client.OK) + expected_status=client.OK, + ) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) self.assertEqual('Bearer', json_resp['token_type']) @@ -348,81 +354,87 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): app_cred = self._create_app_cred(self.user_id, client_name) client_id = app_cred.get('id') - base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ - 'response="%s"' % ( - client_id, 'realm', 'nonce', 'path', 'responding') + base = ( + 'username="%s", realm="%s", nonce="%s", uri="%s", ' + 'response="%s"' + % (client_id, 'realm', 'nonce', 'path', 'responding') + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Digest {base}' + 'Authorization': f'Digest {base}', } error = 'invalid_client' error_description = 'Client authentication failed.' 
- resp = self._get_access_token(app_cred, - b64str=None, - headers=headers, - data=None, - expected_status=client.UNAUTHORIZED) + resp = self._get_access_token( + app_cred, + b64str=None, + headers=headers, + data=None, + expected_status=client.UNAUTHORIZED, + ) self.assertNotEmpty(resp.headers.get("WWW-Authenticate")) - self.assertEqual('Keystone uri="http://localhost/v3"', - resp.headers.get("WWW-Authenticate")) + self.assertEqual( + 'Keystone uri="http://localhost/v3"', + resp.headers.get("WWW-Authenticate"), + ) json_resp = jsonutils.loads(resp.body) LOG.debug(f'error: {json_resp.get("error")}') LOG.debug(f'error_description: {json_resp.get("error_description")}') - self.assertEqual(error, - json_resp.get('error')) - self.assertEqual(error_description, - json_resp.get('error_description')) + self.assertEqual(error, json_resp.get('error')) + self.assertEqual(error_description, json_resp.get('error_description')) def test_get_access_token_without_client_id(self): """Test case when there is no client_id.""" client_name = 'client_name_test' app_cred = self._create_app_cred(self.user_id, client_name) client_secret = app_cred.get('secret') - b64str = b64encode( - f':{client_secret}'.encode()).decode().strip() + b64str = b64encode(f':{client_secret}'.encode()).decode().strip() error = 'invalid_client' error_description = 'Client authentication failed.' 
- resp = self._get_access_token(app_cred, - b64str=b64str, - headers=None, - data=None, - expected_status=client.UNAUTHORIZED) + resp = self._get_access_token( + app_cred, + b64str=b64str, + headers=None, + data=None, + expected_status=client.UNAUTHORIZED, + ) self.assertNotEmpty(resp.headers.get("WWW-Authenticate")) - self.assertEqual('Keystone uri="http://localhost/v3"', - resp.headers.get("WWW-Authenticate")) + self.assertEqual( + 'Keystone uri="http://localhost/v3"', + resp.headers.get("WWW-Authenticate"), + ) json_resp = jsonutils.loads(resp.body) LOG.debug(f'error: {json_resp.get("error")}') LOG.debug(f'error_description: {json_resp.get("error_description")}') - self.assertEqual(error, - json_resp.get('error')) - self.assertEqual(error_description, - json_resp.get('error_description')) + self.assertEqual(error, json_resp.get('error')) + self.assertEqual(error_description, json_resp.get('error_description')) def test_get_access_token_without_client_secret(self): """Test case when there is no client_secret.""" client_name = 'client_name_test' app_cred = self._create_app_cred(self.user_id, client_name) client_id = app_cred.get('id') - b64str = b64encode( - f'{client_id}:'.encode()).decode().strip() + b64str = b64encode(f'{client_id}:'.encode()).decode().strip() error = 'invalid_client' error_description = 'Client authentication failed.' 
- resp = self._get_access_token(app_cred, - b64str=b64str, - headers=None, - data=None, - expected_status=client.UNAUTHORIZED) + resp = self._get_access_token( + app_cred, + b64str=b64str, + headers=None, + data=None, + expected_status=client.UNAUTHORIZED, + ) self.assertNotEmpty(resp.headers.get("WWW-Authenticate")) - self.assertEqual('Keystone uri="http://localhost/v3"', - resp.headers.get("WWW-Authenticate")) + self.assertEqual( + 'Keystone uri="http://localhost/v3"', + resp.headers.get("WWW-Authenticate"), + ) json_resp = jsonutils.loads(resp.body) LOG.debug(f'error: {json_resp.get("error")}') LOG.debug(f'error_description: {json_resp.get("error_description")}') - self.assertEqual(error, - json_resp.get('error')) - self.assertEqual(error_description, - json_resp.get('error_description')) + self.assertEqual(error, json_resp.get('error')) + self.assertEqual(error_description, json_resp.get('error_description')) def test_get_access_token_without_grant_type(self): """Test case when there is no grant_type.""" @@ -431,64 +443,61 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): data = {} error = 'invalid_request' error_description = 'The parameter grant_type is required.' 
- resp = self._get_access_token(app_cred, - b64str=None, - headers=None, - data=data, - expected_status=client.BAD_REQUEST) + resp = self._get_access_token( + app_cred, + b64str=None, + headers=None, + data=data, + expected_status=client.BAD_REQUEST, + ) json_resp = jsonutils.loads(resp.body) LOG.debug(f'error: {json_resp.get("error")}') LOG.debug(f'error_description: {json_resp.get("error_description")}') - self.assertEqual(error, - json_resp.get('error')) - self.assertEqual(error_description, - json_resp.get('error_description')) + self.assertEqual(error, json_resp.get('error')) + self.assertEqual(error_description, json_resp.get('error_description')) def test_get_access_token_blank_grant_type(self): """Test case when grant_type is blank.""" client_name = 'client_name_test' app_cred = self._create_app_cred(self.user_id, client_name) - data = { - 'grant_type': '' - } + data = {'grant_type': ''} error = 'unsupported_grant_type' - error_description = 'The parameter grant_type ' \ - ' is not supported.' - resp = self._get_access_token(app_cred, - b64str=None, - headers=None, - data=data, - expected_status=client.BAD_REQUEST) + error_description = 'The parameter grant_type ' ' is not supported.' 
+ resp = self._get_access_token( + app_cred, + b64str=None, + headers=None, + data=data, + expected_status=client.BAD_REQUEST, + ) json_resp = jsonutils.loads(resp.body) LOG.debug(f'error: {json_resp.get("error")}') LOG.debug(f'error_description: {json_resp.get("error_description")}') - self.assertEqual(error, - json_resp.get('error')) - self.assertEqual(error_description, - json_resp.get('error_description')) + self.assertEqual(error, json_resp.get('error')) + self.assertEqual(error_description, json_resp.get('error_description')) def test_get_access_token_grant_type_is_not_client_credentials(self): """Test case when grant_type is not client_credentials.""" client_name = 'client_name_test' app_cred = self._create_app_cred(self.user_id, client_name) - data = { - 'grant_type': 'not_client_credentials' - } + data = {'grant_type': 'not_client_credentials'} error = 'unsupported_grant_type' - error_description = 'The parameter grant_type ' \ - 'not_client_credentials is not supported.' - resp = self._get_access_token(app_cred, - b64str=None, - headers=None, - data=data, - expected_status=client.BAD_REQUEST) + error_description = ( + 'The parameter grant_type ' + 'not_client_credentials is not supported.' 
+ ) + resp = self._get_access_token( + app_cred, + b64str=None, + headers=None, + data=data, + expected_status=client.BAD_REQUEST, + ) json_resp = jsonutils.loads(resp.body) LOG.debug(f'error: {json_resp.get("error")}') LOG.debug(f'error_description: {json_resp.get("error_description")}') - self.assertEqual(error, - json_resp.get('error')) - self.assertEqual(error_description, - json_resp.get('error_description')) + self.assertEqual(error, json_resp.get('error')) + self.assertEqual(error_description, json_resp.get('error_description')) def test_get_access_token_failed_401(self): """Test case when client authentication failed.""" @@ -498,35 +507,37 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): client_id = app_cred.get('id') client_secret = app_cred.get('secret') - b64str = b64encode( - f'{client_id}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id}:{client_secret}'.encode()).decode().strip() + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' - } - data = { - 'grant_type': 'client_credentials' + 'Authorization': f'Basic {b64str}', } + data = {'grant_type': 'client_credentials'} data = parse.urlencode(data).encode() with mock.patch( - 'keystone.api._shared.authentication.' - 'authenticate_for_token') as co_mock: + 'keystone.api._shared.authentication.' 
'authenticate_for_token' + ) as co_mock: co_mock.side_effect = exception.Unauthorized( - 'client is unauthorized') + 'client is unauthorized' + ) resp = self.post( self.ACCESS_TOKEN_URL, headers=headers, convert=False, body=data, noauth=True, - expected_status=client.UNAUTHORIZED) + expected_status=client.UNAUTHORIZED, + ) self.assertNotEmpty(resp.headers.get("WWW-Authenticate")) - self.assertEqual('Keystone uri="http://localhost/v3"', - resp.headers.get("WWW-Authenticate")) + self.assertEqual( + 'Keystone uri="http://localhost/v3"', + resp.headers.get("WWW-Authenticate"), + ) LOG.debug(f'response: {resp}') json_resp = jsonutils.loads(resp.body) - self.assertEqual(error, - json_resp.get('error')) + self.assertEqual(error, json_resp.get('error')) LOG.debug(f'error: {json_resp.get("error")}') def test_get_access_token_failed_400(self): @@ -536,32 +547,32 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): error = 'invalid_request' client_id = app_cred.get('id') client_secret = app_cred.get('secret') - b64str = b64encode( - f'{client_id}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id}:{client_secret}'.encode()).decode().strip() + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' - } - data = { - 'grant_type': 'client_credentials' + 'Authorization': f'Basic {b64str}', } + data = {'grant_type': 'client_credentials'} data = parse.urlencode(data).encode() with mock.patch( - 'keystone.api._shared.authentication.' - 'authenticate_for_token') as co_mock: + 'keystone.api._shared.authentication.' 
'authenticate_for_token' + ) as co_mock: co_mock.side_effect = exception.ValidationError( - 'Auth method is invalid') + 'Auth method is invalid' + ) resp = self.post( self.ACCESS_TOKEN_URL, headers=headers, convert=False, body=data, noauth=True, - expected_status=client.BAD_REQUEST) + expected_status=client.BAD_REQUEST, + ) LOG.debug(f'response: {resp}') json_resp = jsonutils.loads(resp.body) - self.assertEqual(error, - json_resp.get('error')) + self.assertEqual(error, json_resp.get('error')) LOG.debug(f'error: {json_resp.get("error")}') def test_get_access_token_failed_500_other(self): @@ -571,33 +582,33 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): error = 'other_error' client_id = app_cred.get('id') client_secret = app_cred.get('secret') - b64str = b64encode( - f'{client_id}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id}:{client_secret}'.encode()).decode().strip() + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' - } - data = { - 'grant_type': 'client_credentials' + 'Authorization': f'Basic {b64str}', } + data = {'grant_type': 'client_credentials'} data = parse.urlencode(data).encode() with mock.patch( - 'keystone.api._shared.authentication.' - 'authenticate_for_token') as co_mock: + 'keystone.api._shared.authentication.' 'authenticate_for_token' + ) as co_mock: co_mock.side_effect = exception.UnexpectedError( - 'unexpected error.') + 'unexpected error.' 
+ ) resp = self.post( self.ACCESS_TOKEN_URL, headers=headers, convert=False, body=data, noauth=True, - expected_status=client.INTERNAL_SERVER_ERROR) + expected_status=client.INTERNAL_SERVER_ERROR, + ) LOG.debug(f'response: {resp}') json_resp = jsonutils.loads(resp.body) - self.assertEqual(error, - json_resp.get('error')) + self.assertEqual(error, json_resp.get('error')) def test_get_access_token_failed_500(self): """Test case when internal server error.""" @@ -606,77 +617,83 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): error = 'other_error' client_id = app_cred.get('id') client_secret = app_cred.get('secret') - b64str = b64encode( - f'{client_id}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id}:{client_secret}'.encode()).decode().strip() + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' - } - data = { - 'grant_type': 'client_credentials' + 'Authorization': f'Basic {b64str}', } + data = {'grant_type': 'client_credentials'} data = parse.urlencode(data).encode() with mock.patch( - 'keystone.api._shared.authentication.' - 'authenticate_for_token') as co_mock: - co_mock.side_effect = Exception( - 'Internal server is invalid') + 'keystone.api._shared.authentication.' 
'authenticate_for_token' + ) as co_mock: + co_mock.side_effect = Exception('Internal server is invalid') resp = self.post( self.ACCESS_TOKEN_URL, headers=headers, convert=False, body=data, noauth=True, - expected_status=client.INTERNAL_SERVER_ERROR) + expected_status=client.INTERNAL_SERVER_ERROR, + ) LOG.debug(f'response: {resp}') json_resp = jsonutils.loads(resp.body) - self.assertEqual(error, - json_resp.get('error')) + self.assertEqual(error, json_resp.get('error')) def test_get_access_token_method_get_not_allowed(self): """Test case when the request is get method that is not allowed.""" client_name = 'client_name_test' app_cred = self._create_app_cred(self.user_id, client_name) json_resp = self._get_access_token_method_not_allowed( - app_cred, self.get) - self.assertEqual('other_error', - json_resp.get('error')) - self.assertEqual('The method is not allowed for the requested URL.', - json_resp.get('error_description')) + app_cred, self.get + ) + self.assertEqual('other_error', json_resp.get('error')) + self.assertEqual( + 'The method is not allowed for the requested URL.', + json_resp.get('error_description'), + ) def test_get_access_token_method_patch_not_allowed(self): """Test case when the request is patch method that is not allowed.""" client_name = 'client_name_test' app_cred = self._create_app_cred(self.user_id, client_name) json_resp = self._get_access_token_method_not_allowed( - app_cred, self.patch) - self.assertEqual('other_error', - json_resp.get('error')) - self.assertEqual('The method is not allowed for the requested URL.', - json_resp.get('error_description')) + app_cred, self.patch + ) + self.assertEqual('other_error', json_resp.get('error')) + self.assertEqual( + 'The method is not allowed for the requested URL.', + json_resp.get('error_description'), + ) def test_get_access_token_method_put_not_allowed(self): """Test case when the request is put method that is not allowed.""" client_name = 'client_name_test' app_cred = 
self._create_app_cred(self.user_id, client_name) json_resp = self._get_access_token_method_not_allowed( - app_cred, self.put) - self.assertEqual('other_error', - json_resp.get('error')) - self.assertEqual('The method is not allowed for the requested URL.', - json_resp.get('error_description')) + app_cred, self.put + ) + self.assertEqual('other_error', json_resp.get('error')) + self.assertEqual( + 'The method is not allowed for the requested URL.', + json_resp.get('error_description'), + ) def test_get_access_token_method_delete_not_allowed(self): """Test case when the request is delete method that is not allowed.""" client_name = 'client_name_test' app_cred = self._create_app_cred(self.user_id, client_name) json_resp = self._get_access_token_method_not_allowed( - app_cred, self.delete) - self.assertEqual('other_error', - json_resp.get('error')) - self.assertEqual('The method is not allowed for the requested URL.', - json_resp.get('error_description')) + app_cred, self.delete + ) + self.assertEqual('other_error', json_resp.get('error')) + self.assertEqual( + 'The method is not allowed for the requested URL.', + json_resp.get('error_description'), + ) def test_get_access_token_method_head_not_allowed(self): """Test case when the request is head method that is not allowed.""" @@ -684,17 +701,19 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase): app_cred = self._create_app_cred(self.user_id, client_name) client_id = app_cred.get('id') client_secret = app_cred.get('secret') - b64str = b64encode( - f'{client_id}:{client_secret}'.encode()).decode().strip() + b64str = ( + b64encode(f'{client_id}:{client_secret}'.encode()).decode().strip() + ) headers = { 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': f'Basic {b64str}' + 'Authorization': f'Basic {b64str}', } self.head( self.ACCESS_TOKEN_URL, headers=headers, convert=False, - expected_status=client.METHOD_NOT_ALLOWED) + expected_status=client.METHOD_NOT_ALLOWED, + ) class 
OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): @@ -703,10 +722,12 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): def setUp(self): super(OAuth2CertificateTests, self).setUp() self.log_fix = self.useFixture(fixtures.FakeLogger(level=log.DEBUG)) - self.config_fixture.config(group='oauth2', - oauth2_authn_methods=['tls_client_auth']) - self.config_fixture.config(group='oauth2', - oauth2_cert_dn_mapping_id='oauth2_mapping') + self.config_fixture.config( + group='oauth2', oauth2_authn_methods=['tls_client_auth'] + ) + self.config_fixture.config( + group='oauth2', oauth2_cert_dn_mapping_id='oauth2_mapping' + ) ( self.oauth2_user, self.oauth2_user_domain, @@ -718,7 +739,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=self.oauth2_user.get('name'), email_address=self.oauth2_user.get('email'), domain_component=self.oauth2_user_domain.get('id'), - organization_name=self.oauth2_user_domain.get('name') + organization_name=self.oauth2_user_domain.get('name'), ) ) @@ -731,27 +752,29 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): PROVIDERS.resource_api.create_project( new_project_ref['id'], new_project_ref ) - new_user = unit.create_user(PROVIDERS.identity_api, - domain_id=new_domain_ref['id'], - project_id=new_project_ref['id']) + new_user = unit.create_user( + PROVIDERS.identity_api, + domain_id=new_domain_ref['id'], + project_id=new_project_ref['id'], + ) if not no_roles: PROVIDERS.assignment_api.create_grant( self.role['id'], user_id=new_user['id'], - project_id=new_project_ref['id']) + project_id=new_project_ref['id'], + ) return new_user, new_domain_ref, new_project_ref - def _create_certificates(self, - root_dn=None, - server_dn=None, - client_dn=None): + def _create_certificates( + self, root_dn=None, server_dn=None, client_dn=None + ): root_subj = unit.create_dn( country_name='jp', state_or_province_name='kanagawa', locality_name='kawasaki', organization_name='fujitsu', organizational_unit_name='test', 
- common_name='root' + common_name='root', ) if root_dn: root_subj = unit.update_dn(root_subj, root_dn) @@ -763,24 +786,26 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): locality_name='kawasaki', organization_name='fujitsu', organizational_unit_name='test', - common_name='keystone.local' + common_name='keystone.local', ) if server_dn: keystone_subj = unit.update_dn(keystone_subj, server_dn) ks_cert, ks_key = unit.create_certificate( - keystone_subj, ca=root_cert, ca_key=root_key) + keystone_subj, ca=root_cert, ca_key=root_key + ) client_subj = unit.create_dn( country_name='jp', state_or_province_name='kanagawa', locality_name='kawasaki', - organizational_unit_name='test' + organizational_unit_name='test', ) if client_dn: client_subj = unit.update_dn(client_subj, client_dn) client_cert, client_key = unit.create_certificate( - client_subj, ca=root_cert, ca_key=root_key) + client_subj, ca=root_cert, ca_key=root_key + ) return root_cert, root_key, ks_cert, ks_key, client_cert, client_key def _create_mapping(self, id='oauth2_mapping', dn_rules=None): @@ -793,7 +818,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', - 'SSL_CLIENT_ISSUER_DN_CN': ['root'] + 'SSL_CLIENT_ISSUER_DN_CN': ['root'], } ] for info in dn_rules: @@ -825,35 +850,24 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): remote.append({'type': info.get(k)}) index += 1 else: - remote.append({ - 'type': k, - 'any_one_of': info.get(k) - }) - rule = { - 'local': [ - { - 'user': local_user - } - ], - 'remote': remote - } + remote.append({'type': k, 'any_one_of': info.get(k)}) + rule = {'local': [{'user': local_user}], 'remote': remote} rules.append(rule) - mapping = { - 'id': id, - 'rules': rules - } + mapping = {'id': id, 'rules': rules} PROVIDERS.federation_api.create_mapping(mapping['id'], mapping) - def 
_get_access_token(self, client_id=None, client_cert_content=None, - expected_status=http.client.OK): + def _get_access_token( + self, + client_id=None, + client_cert_content=None, + expected_status=http.client.OK, + ): headers = { 'Content-Type': 'application/x-www-form-urlencoded', } - data = { - 'grant_type': 'client_credentials' - } + data = {'grant_type': 'client_credentials'} if client_id: data.update({'client_id': client_id}) data = parse.urlencode(data).encode() @@ -862,15 +876,13 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): 'noauth': True, 'convert': False, 'body': data, - 'expected_status': expected_status + 'expected_status': expected_status, } if client_cert_content: - kwargs.update({'environ': { - 'SSL_CLIENT_CERT': client_cert_content - }}) - resp = self.post( - self.ACCESS_TOKEN_URL, - **kwargs) + kwargs.update( + {'environ': {'SSL_CLIENT_CERT': client_cert_content}} + ) + resp = self.post(self.ACCESS_TOKEN_URL, **kwargs) return resp def _get_cert_content(self, cert): @@ -881,8 +893,8 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): json_resp = jsonutils.loads(resp.body) self.assertEqual('invalid_client', json_resp['error']) self.assertEqual( - 'Client authentication failed.', - json_resp['error_description']) + 'Client authentication failed.', json_resp['error_description'] + ) def test_get_access_token_project_scope(self): """Test case when an access token can be successfully obtain.""" @@ -898,14 +910,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = 
jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -916,33 +928,39 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) def test_get_access_token_mapping_config(self): """Test case when an access token can be successfully obtain.""" - self.config_fixture.config(group='oauth2', - oauth2_cert_dn_mapping_id='oauth2_custom') + self.config_fixture.config( + group='oauth2', oauth2_cert_dn_mapping_id='oauth2_custom' + ) self._create_mapping( id='oauth2_custom', dn_rules=[ { 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_DC', - 'SSL_CLIENT_ISSUER_DN_CN': ['root'] + 'SSL_CLIENT_ISSUER_DN_CN': ['root'], } - ]) + ], + ) user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( @@ -954,14 +972,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id='test_UID', common_name=user.get('name'), domain_component=user_domain.get('name'), - organization_name='test_O' + organization_name='test_O', ) ) cert_content = self._get_cert_content(client_cert) resp = 
self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -972,27 +990,33 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) - self.config_fixture.config(group='oauth2', - oauth2_cert_dn_mapping_id='oauth2_mapping') + self.config_fixture.config( + group='oauth2', oauth2_cert_dn_mapping_id='oauth2_mapping' + ) def test_get_access_token_mapping_multi_ca(self): """Test case when an access token can be successfully obtain.""" - self.config_fixture.config(group='oauth2', - oauth2_cert_dn_mapping_id='oauth2_custom') + self.config_fixture.config( + group='oauth2', oauth2_cert_dn_mapping_id='oauth2_custom' + ) self._create_mapping( id='oauth2_custom', dn_rules=[ @@ -1002,21 +1026,20 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', - 
'SSL_CLIENT_ISSUER_DN_CN': ['rootA', 'rootB'] + 'SSL_CLIENT_ISSUER_DN_CN': ['rootA', 'rootB'], }, { 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_DC', - 'SSL_CLIENT_ISSUER_DN_CN': ['rootC'] - } - ]) + 'SSL_CLIENT_ISSUER_DN_CN': ['rootC'], + }, + ], + ) # CA rootA OK user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( - root_dn=unit.create_dn( - common_name='rootA' - ), + root_dn=unit.create_dn(common_name='rootA'), client_dn=unit.create_dn( country_name='jp', state_or_province_name='kanagawa', @@ -1026,14 +1049,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') - ) + organization_name=user_domain.get('name'), + ), ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -1044,26 +1067,28 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - 
self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) # CA rootB OK user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( - root_dn=unit.create_dn( - common_name='rootB' - ), + root_dn=unit.create_dn(common_name='rootB'), client_dn=unit.create_dn( country_name='jp', state_or_province_name='kanagawa', @@ -1073,14 +1098,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') - ) + organization_name=user_domain.get('name'), + ), ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -1091,26 +1116,28 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + 
utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) # CA rootC OK user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( - root_dn=unit.create_dn( - common_name='rootC' - ), + root_dn=unit.create_dn(common_name='rootC'), client_dn=unit.create_dn( country_name='jp', state_or_province_name='kanagawa', @@ -1119,14 +1146,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id='test_UID', common_name=user.get('name'), domain_component=user_domain.get('name'), - organization_name='test_O' - ) + organization_name='test_O', + ), ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -1137,26 +1164,28 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) # CA not found NG user, user_domain, user_project = self._create_project_user() *_, 
client_cert, _ = self._create_certificates( - root_dn=unit.create_dn( - common_name='root_other' - ), + root_dn=unit.create_dn(common_name='root_other'), client_dn=unit.create_dn( country_name='jp', state_or_province_name='kanagawa', @@ -1166,23 +1195,24 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') - ) + organization_name=user_domain.get('name'), + ), ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( - 'Get OAuth2.0 Access Token API: ' - 'mapping rule process failed.', - self.log_fix.output) + 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.', + self.log_fix.output, + ) - self.config_fixture.config(group='oauth2', - oauth2_cert_dn_mapping_id='oauth2_mapping') + self.config_fixture.config( + group='oauth2', oauth2_cert_dn_mapping_id='oauth2_mapping' + ) def test_get_access_token_no_default_mapping(self): user, user_domain, _ = self._create_project_user() @@ -1196,7 +1226,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1204,17 +1234,19 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( 'Get OAuth2.0 Access Token API: ' 'mapping id %s is not found. 
' % 'oauth2_mapping', - self.log_fix.output) + self.log_fix.output, + ) def test_get_access_token_no_custom_mapping(self): - self.config_fixture.config(group='oauth2', - oauth2_cert_dn_mapping_id='oauth2_custom') + self.config_fixture.config( + group='oauth2', oauth2_cert_dn_mapping_id='oauth2_custom' + ) self._create_mapping() user, user_domain, _ = self._create_project_user() *_, client_cert, _ = self._create_certificates( @@ -1227,7 +1259,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1235,26 +1267,31 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( 'Get OAuth2.0 Access Token API: ' 'mapping id %s is not found. 
' % 'oauth2_custom', - self.log_fix.output) - self.config_fixture.config(group='oauth2', - oauth2_cert_dn_mapping_id='oauth2_mapping') + self.log_fix.output, + ) + self.config_fixture.config( + group='oauth2', oauth2_cert_dn_mapping_id='oauth2_mapping' + ) def test_get_access_token_ignore_userid(self): """Test case when an access token can be successfully obtain.""" - self._create_mapping(dn_rules=[ - { - 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', - 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', - 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', - 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', - 'SSL_CLIENT_ISSUER_DN_CN': ['root'] - }]) + self._create_mapping( + dn_rules=[ + { + 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', + 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', + 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', + 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', + 'SSL_CLIENT_ISSUER_DN_CN': ['root'], + } + ] + ) user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( @@ -1267,14 +1304,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -1285,30 +1322,37 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) 
self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) def test_get_access_token_ignore_username(self): """Test case when an access token can be successfully obtain.""" - self._create_mapping(dn_rules=[ - { - 'user.id': 'SSL_CLIENT_SUBJECT_DN_UID', - 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', - 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', - 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', - 'SSL_CLIENT_ISSUER_DN_CN': ['root'] - }]) + self._create_mapping( + dn_rules=[ + { + 'user.id': 'SSL_CLIENT_SUBJECT_DN_UID', + 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', + 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', + 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', + 'SSL_CLIENT_ISSUER_DN_CN': ['root'], + } + ] + ) user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( client_dn=unit.create_dn( @@ -1319,14 +1363,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id=user.get('id'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', 
json_resp) @@ -1337,30 +1381,37 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) def test_get_access_token_ignore_email(self): """Test case when an access token can be successfully obtain.""" - self._create_mapping(dn_rules=[ - { - 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', - 'user.id': 'SSL_CLIENT_SUBJECT_DN_UID', - 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', - 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', - 'SSL_CLIENT_ISSUER_DN_CN': ['root'] - }]) + self._create_mapping( + dn_rules=[ + { + 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', + 'user.id': 'SSL_CLIENT_SUBJECT_DN_UID', + 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', + 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', + 'SSL_CLIENT_ISSUER_DN_CN': ['root'], + } + ] + ) user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( client_dn=unit.create_dn( @@ -1371,14 +1422,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id=user.get('id'), common_name=user.get('name'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + 
organization_name=user_domain.get('name'), ) ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -1389,30 +1440,37 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) def test_get_access_token_ignore_domain_id(self): """Test case when an access token can be successfully obtain.""" - self._create_mapping(dn_rules=[ - { - 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', - 'user.id': 'SSL_CLIENT_SUBJECT_DN_UID', - 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', - 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', - 'SSL_CLIENT_ISSUER_DN_CN': ['root'] - }]) + self._create_mapping( + dn_rules=[ + { + 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', + 'user.id': 'SSL_CLIENT_SUBJECT_DN_UID', + 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', + 'user.domain.name': 'SSL_CLIENT_SUBJECT_DN_O', + 'SSL_CLIENT_ISSUER_DN_CN': ['root'], + } + ] + ) user, 
user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( client_dn=unit.create_dn( @@ -1424,14 +1482,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id') + "_diff", - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -1442,30 +1500,37 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) def test_get_access_token_ignore_domain_name(self): """Test case when an access token can be successfully obtain.""" - self._create_mapping(dn_rules=[ - { - 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', - 'user.id': 'SSL_CLIENT_SUBJECT_DN_UID', - 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', - 
'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', - 'SSL_CLIENT_ISSUER_DN_CN': ['root'] - }]) + self._create_mapping( + dn_rules=[ + { + 'user.name': 'SSL_CLIENT_SUBJECT_DN_CN', + 'user.id': 'SSL_CLIENT_SUBJECT_DN_UID', + 'user.email': 'SSL_CLIENT_SUBJECT_DN_EMAILADDRESS', + 'user.domain.id': 'SSL_CLIENT_SUBJECT_DN_DC', + 'SSL_CLIENT_ISSUER_DN_CN': ['root'], + } + ] + ) user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( client_dn=unit.create_dn( @@ -1476,14 +1541,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id=user.get('id'), common_name=user.get('name'), email_address=user.get('email'), - domain_component=user_domain.get('id') + domain_component=user_domain.get('id'), ) ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -1494,26 +1559,27 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + 
utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) def test_get_access_token_ignore_all(self): """Test case when an access token can be successfully obtain.""" - self._create_mapping(dn_rules=[ - { - 'SSL_CLIENT_ISSUER_DN_CN': ['root'] - }]) + self._create_mapping(dn_rules=[{'SSL_CLIENT_ISSUER_DN_CN': ['root']}]) user, user_domain, user_project = self._create_project_user() *_, client_cert, _ = self._create_certificates( client_dn=unit.create_dn( @@ -1524,14 +1590,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id=user.get('id') + "_diff", common_name=user.get('name') + "_diff", email_address=user.get('email') + "_diff", - domain_component=user_domain.get('id') + "_diff" + domain_component=user_domain.get('id') + "_diff", ) ) cert_content = self._get_cert_content(client_cert) resp = self._get_access_token( - client_id=user.get('id'), - client_cert_content=cert_content) + client_id=user.get('id'), client_cert_content=cert_content + ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertIn('access_token', json_resp) @@ -1542,19 +1608,23 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): '/auth/tokens', headers={ 'X-Subject-Token': json_resp['access_token'], - 'X-Auth-Token': json_resp['access_token'] - } + 'X-Auth-Token': json_resp['access_token'], + }, ) self.assertIn('token', verify_resp.result) self.assertIn('oauth2_credential', verify_resp.result['token']) self.assertIn('roles', verify_resp.result['token']) self.assertIn('project', verify_resp.result['token']) self.assertIn('catalog', verify_resp.result['token']) - self.assertEqual(user_project.get('id'), - verify_resp.result['token']['project']['id']) + self.assertEqual( + user_project.get('id'), + verify_resp.result['token']['project']['id'], + ) check_oauth2 = verify_resp.result['token']['oauth2_credential'] - self.assertEqual(utils.get_certificate_thumbprint(cert_content), - check_oauth2['x5t#S256']) + self.assertEqual( + 
utils.get_certificate_thumbprint(cert_content), + check_oauth2['x5t#S256'], + ) def test_get_access_token_no_roles_project_scope(self): self._create_mapping() @@ -1569,7 +1639,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1577,7 +1647,8 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED) + expected_status=http.client.UNAUTHORIZED, + ) LOG.debug(resp) def test_get_access_token_no_default_project_id(self): @@ -1594,7 +1665,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1602,64 +1673,77 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): _ = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED) + expected_status=http.client.UNAUTHORIZED, + ) def test_get_access_token_without_client_id(self): self._create_mapping() cert_content = self._get_cert_content(self.client_cert) resp = self._get_access_token( client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) - self.assertIn('Get OAuth2.0 Access Token API: ' - 'failed to get a client_id from the request.', - self.log_fix.output) + self.assertIn( + 'Get OAuth2.0 Access Token API: ' + 'failed to get a client_id from the request.', + self.log_fix.output, + ) def test_get_access_token_without_client_cert(self): self._create_mapping() resp = self._get_access_token( 
client_id=self.oauth2_user.get('id'), - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) - self.assertIn('Get OAuth2.0 Access Token API: ' - 'failed to get client credentials from the request.', - self.log_fix.output) + self.assertIn( + 'Get OAuth2.0 Access Token API: ' + 'failed to get client credentials from the request.', + self.log_fix.output, + ) @mock.patch.object(utils, 'get_certificate_subject_dn') def test_get_access_token_failed_to_get_cert_subject_dn( - self, mock_get_certificate_subject_dn): + self, mock_get_certificate_subject_dn + ): self._create_mapping() - mock_get_certificate_subject_dn.side_effect = \ + mock_get_certificate_subject_dn.side_effect = ( exception.ValidationError('Boom!') + ) cert_content = self._get_cert_content(self.client_cert) resp = self._get_access_token( client_id=self.oauth2_user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) - self.assertIn('Get OAuth2.0 Access Token API: ' - 'failed to get the subject DN from the certificate.', - self.log_fix.output) + self.assertIn( + 'Get OAuth2.0 Access Token API: ' + 'failed to get the subject DN from the certificate.', + self.log_fix.output, + ) @mock.patch.object(utils, 'get_certificate_issuer_dn') def test_get_access_token_failed_to_get_cert_issuer_dn( - self, mock_get_certificate_issuer_dn): + self, mock_get_certificate_issuer_dn + ): self._create_mapping() - mock_get_certificate_issuer_dn.side_effect = \ - exception.ValidationError('Boom!') + mock_get_certificate_issuer_dn.side_effect = exception.ValidationError( + 'Boom!' 
+ ) cert_content = self._get_cert_content(self.client_cert) resp = self._get_access_token( client_id=self.oauth2_user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) - self.assertIn('Get OAuth2.0 Access Token API: ' - 'failed to get the issuer DN from the certificate.', - self.log_fix.output) + self.assertIn( + 'Get OAuth2.0 Access Token API: ' + 'failed to get the issuer DN from the certificate.', + self.log_fix.output, + ) def test_get_access_token_user_not_exist(self): self._create_mapping() @@ -1668,14 +1752,14 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user_id_not_exist, client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( 'Get OAuth2.0 Access Token API: ' - 'the user does not exist. user id: %s' - % user_id_not_exist, - self.log_fix.output) + 'the user does not exist. user id: %s' % user_id_not_exist, + self.log_fix.output, + ) def test_get_access_token_cert_dn_not_match_user_id(self): self._create_mapping() @@ -1690,7 +1774,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1698,16 +1782,15 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( 'Get OAuth2.0 Access Token API: %s check failed. ' - 'DN value: %s, DB value: %s.' 
% ( - 'user id', - user.get('id') + '_diff', - user.get('id')), - self.log_fix.output) + 'DN value: %s, DB value: %s.' + % ('user id', user.get('id') + '_diff', user.get('id')), + self.log_fix.output, + ) def test_get_access_token_cert_dn_not_match_user_name(self): self._create_mapping() @@ -1722,7 +1805,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name') + "_diff", email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1730,16 +1813,15 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( 'Get OAuth2.0 Access Token API: %s check failed. ' - 'DN value: %s, DB value: %s.' % ( - 'user name', - user.get('name') + '_diff', - user.get('name')), - self.log_fix.output) + 'DN value: %s, DB value: %s.' + % ('user name', user.get('name') + '_diff', user.get('name')), + self.log_fix.output, + ) def test_get_access_token_cert_dn_not_match_email(self): self._create_mapping() @@ -1754,7 +1836,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email') + "_diff", domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1762,16 +1844,15 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( 'Get OAuth2.0 Access Token API: %s check failed. ' - 'DN value: %s, DB value: %s.' 
% ( - 'user email', - user.get('email') + '_diff', - user.get('email')), - self.log_fix.output) + 'DN value: %s, DB value: %s.' + % ('user email', user.get('email') + '_diff', user.get('email')), + self.log_fix.output, + ) def test_get_access_token_cert_dn_not_match_domain_id(self): self._create_mapping() @@ -1786,7 +1867,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id') + "_diff", - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1794,16 +1875,19 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( 'Get OAuth2.0 Access Token API: %s check failed. ' - 'DN value: %s, DB value: %s.' % ( + 'DN value: %s, DB value: %s.' + % ( 'user domain id', user_domain.get('id') + '_diff', - user_domain.get('id')), - self.log_fix.output) + user_domain.get('id'), + ), + self.log_fix.output, + ) def test_get_access_token_cert_dn_not_match_domain_name(self): self._create_mapping() @@ -1818,7 +1902,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + "_diff" + organization_name=user_domain.get('name') + "_diff", ) ) @@ -1826,16 +1910,19 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( 'Get OAuth2.0 Access Token API: %s check failed. ' - 'DN value: %s, DB value: %s.' 
% ( + 'DN value: %s, DB value: %s.' + % ( 'user domain name', user_domain.get('name') + '_diff', - user_domain.get('name')), - self.log_fix.output) + user_domain.get('name'), + ), + self.log_fix.output, + ) def test_get_access_token_cert_dn_missing_user_id(self): self._create_mapping() @@ -1849,7 +1936,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): common_name=user.get('name'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1857,13 +1944,13 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( - 'Get OAuth2.0 Access Token API: ' - 'mapping rule process failed.', - self.log_fix.output) + 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.', + self.log_fix.output, + ) def test_get_access_token_cert_dn_missing_user_name(self): self._create_mapping() @@ -1877,7 +1964,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id=user.get('id'), email_address=user.get('email'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1885,13 +1972,13 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( - 'Get OAuth2.0 Access Token API: ' - 'mapping rule process failed.', - self.log_fix.output) + 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.', + self.log_fix.output, + ) def test_get_access_token_cert_dn_missing_email(self): 
self._create_mapping() @@ -1905,7 +1992,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id=user.get('id'), common_name=user.get('name'), domain_component=user_domain.get('id'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1913,13 +2000,13 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( - 'Get OAuth2.0 Access Token API: ' - 'mapping rule process failed.', - self.log_fix.output) + 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.', + self.log_fix.output, + ) def test_get_access_token_cert_dn_missing_domain_id(self): self._create_mapping() @@ -1933,7 +2020,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): user_id=user.get('id'), common_name=user.get('name'), email_address=user.get('email'), - organization_name=user_domain.get('name') + organization_name=user_domain.get('name'), ) ) @@ -1941,13 +2028,13 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) self.assertUnauthorizedResp(resp) self.assertIn( - 'Get OAuth2.0 Access Token API: ' - 'mapping rule process failed.', - self.log_fix.output) + 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.', + self.log_fix.output, + ) def test_get_access_token_cert_dn_missing_domain_name(self): self._create_mapping() @@ -1969,13 +2056,13 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) 
self.assertUnauthorizedResp(resp) self.assertIn( - 'Get OAuth2.0 Access Token API: ' - 'mapping rule process failed.', - self.log_fix.output) + 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.', + self.log_fix.output, + ) @mock.patch.object(Manager, 'issue_token') def test_get_access_token_issue_token_ks_error_400(self, mock_issue_token): @@ -1986,7 +2073,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=self.oauth2_user.get('id'), client_cert_content=cert_content, - expected_status=http.client.BAD_REQUEST + expected_status=http.client.BAD_REQUEST, ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) @@ -2003,18 +2090,20 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=self.oauth2_user.get('id'), client_cert_content=cert_content, - expected_status=http.client.UNAUTHORIZED + expected_status=http.client.UNAUTHORIZED, ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertEqual('invalid_client', json_resp['error']) self.assertEqual( 'The request you have made requires authentication.', - json_resp['error_description']) + json_resp['error_description'], + ) @mock.patch.object(Manager, 'issue_token') def test_get_access_token_issue_token_ks_error_other( - self, mock_issue_token): + self, mock_issue_token + ): self._create_mapping() err_msg = 'Boom!' 
mock_issue_token.side_effect = exception.NotImplemented(err_msg) @@ -2022,7 +2111,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=self.oauth2_user.get('id'), client_cert_content=cert_content, - expected_status=exception.NotImplemented.code + expected_status=exception.NotImplemented.code, ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) @@ -2030,11 +2119,13 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): self.assertEqual( 'An unknown error occurred and failed to get an OAuth2.0 ' 'access token.', - json_resp['error_description']) + json_resp['error_description'], + ) @mock.patch.object(Manager, 'issue_token') def test_get_access_token_issue_token_other_exception( - self, mock_issue_token): + self, mock_issue_token + ): self._create_mapping() err_msg = 'Boom!' mock_issue_token.side_effect = Exception(err_msg) @@ -2042,7 +2133,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=self.oauth2_user.get('id'), client_cert_content=cert_content, - expected_status=http.client.INTERNAL_SERVER_ERROR + expected_status=http.client.INTERNAL_SERVER_ERROR, ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) @@ -2050,8 +2141,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): self.assertEqual(err_msg, json_resp['error_description']) @mock.patch.object(RuleProcessor, 'process') - def test_get_access_token_process_other_exception( - self, mock_process): + def test_get_access_token_process_other_exception(self, mock_process): self._create_mapping() err_msg = 'Boom!' 
mock_process.side_effect = Exception(err_msg) @@ -2059,13 +2149,13 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase): resp = self._get_access_token( client_id=self.oauth2_user.get('id'), client_cert_content=cert_content, - expected_status=http.client.INTERNAL_SERVER_ERROR + expected_status=http.client.INTERNAL_SERVER_ERROR, ) LOG.debug(resp) json_resp = jsonutils.loads(resp.body) self.assertEqual('other_error', json_resp['error']) self.assertEqual(err_msg, json_resp['error_description']) self.assertIn( - 'Get OAuth2.0 Access Token API: ' - 'mapping rule process failed.', - self.log_fix.output) + 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.', + self.log_fix.output, + ) diff --git a/keystone/tests/unit/test_v3_os_revoke.py b/keystone/tests/unit/test_v3_os_revoke.py index fbcfd410e7..37bd05d792 100644 --- a/keystone/tests/unit/test_v3_os_revoke.py +++ b/keystone/tests/unit/test_v3_os_revoke.py @@ -55,19 +55,28 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): def assertReportedEventMatchesRecorded(self, event, sample, before_time): after_time = timeutils.utcnow() event_issued_before = timeutils.normalize_time( - timeutils.parse_isotime(event['issued_before'])) + timeutils.parse_isotime(event['issued_before']) + ) self.assertLessEqual( - before_time, event_issued_before, - 'invalid event issued_before time; %s is not later than %s.' % ( + before_time, + event_issued_before, + 'invalid event issued_before time; %s is not later than %s.' + % ( utils.isotime(event_issued_before, subsecond=True), - utils.isotime(before_time, subsecond=True))) + utils.isotime(before_time, subsecond=True), + ), + ) self.assertLessEqual( - event_issued_before, after_time, - 'invalid event issued_before time; %s is not earlier than %s.' % ( + event_issued_before, + after_time, + 'invalid event issued_before time; %s is not earlier than %s.' 
+ % ( utils.isotime(event_issued_before, subsecond=True), - utils.isotime(after_time, subsecond=True))) - del (event['issued_before']) - del (event['revoked_at']) + utils.isotime(after_time, subsecond=True), + ), + ) + del event['issued_before'] + del event['revoked_at'] self.assertEqual(sample, event) def test_revoked_list_self_url(self): @@ -93,7 +102,8 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): sample['project_id'] = str(project_id) before_time = timeutils.utcnow().replace(microsecond=0) PROVIDERS.revoke_api.revoke( - revoke_model.RevokeEvent(project_id=project_id)) + revoke_model.RevokeEvent(project_id=project_id) + ) resp = self.get('/OS-REVOKE/events') events = resp.json_body['events'] @@ -106,7 +116,8 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): sample['domain_id'] = str(domain_id) before_time = timeutils.utcnow().replace(microsecond=0) PROVIDERS.revoke_api.revoke( - revoke_model.RevokeEvent(domain_id=domain_id)) + revoke_model.RevokeEvent(domain_id=domain_id) + ) resp = self.get('/OS-REVOKE/events') events = resp.json_body['events'] @@ -114,8 +125,10 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): self.assertReportedEventMatchesRecorded(events[0], sample, before_time) def test_list_since_invalid(self): - self.get('/OS-REVOKE/events?since=blah', - expected_status=http.client.BAD_REQUEST) + self.get( + '/OS-REVOKE/events?since=blah', + expected_status=http.client.BAD_REQUEST, + ) def test_list_since_valid(self): resp = self.get('/OS-REVOKE/events?since=2013-02-27T18:30:59.999999Z') @@ -128,7 +141,8 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): sample['domain_id'] = str(domain_id) PROVIDERS.revoke_api.revoke( - revoke_model.RevokeEvent(domain_id=domain_id)) + revoke_model.RevokeEvent(domain_id=domain_id) + ) resp = self.get('/OS-REVOKE/events') events = resp.json_body['events'] @@ -144,7 +158,8 @@ class 
OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): revoked_at = timeutils.utcnow() # Given or not, `revoked_at` will always be set in the backend. PROVIDERS.revoke_api.revoke( - revoke_model.RevokeEvent(revoked_at=revoked_at)) + revoke_model.RevokeEvent(revoked_at=revoked_at) + ) frozen_datetime.tick(delta=datetime.timedelta(seconds=1)) @@ -152,8 +167,9 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): events = resp.json_body['events'] self.assertThat(events, matchers.HasLength(1)) # Strip off the microseconds from `revoked_at`. - self.assertTimestampEqual(utils.isotime(revoked_at), - events[0]['revoked_at']) + self.assertTimestampEqual( + utils.isotime(revoked_at), events[0]['revoked_at'] + ) def test_access_token_id_not_in_event(self): ref = {'description': uuid.uuid4().hex} @@ -171,8 +187,9 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): self.assertNotIn('OS-OAUTH1:access_token_id', event) def test_retries_on_deadlock(self): - patcher = mock.patch('sqlalchemy.orm.query.Query.delete', - autospec=True) + patcher = mock.patch( + 'sqlalchemy.orm.query.Query.delete', autospec=True + ) # NOTE(mnikolaenko): raise 2 deadlocks and back to normal work of # method. Two attempts is enough to check that retry decorator works. 
@@ -196,8 +213,9 @@ class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): sql_delete_mock.side_effect = side_effect try: - PROVIDERS.revoke_api.revoke(revoke_model.RevokeEvent( - user_id=uuid.uuid4().hex)) + PROVIDERS.revoke_api.revoke( + revoke_model.RevokeEvent(user_id=uuid.uuid4().hex) + ) finally: if side_effect.patched: patcher.stop() diff --git a/keystone/tests/unit/test_v3_policy.py b/keystone/tests/unit/test_v3_policy.py index f6b9073200..4db15a471b 100644 --- a/keystone/tests/unit/test_v3_policy.py +++ b/keystone/tests/unit/test_v3_policy.py @@ -31,9 +31,7 @@ class PolicyTestCase(test_v3.RestfulTestCase): super(PolicyTestCase, self).setUp() self.policy = unit.new_policy_ref() self.policy_id = self.policy['id'] - PROVIDERS.policy_api.create_policy( - self.policy_id, - self.policy.copy()) + PROVIDERS.policy_api.create_policy(self.policy_id, self.policy.copy()) # policy crud tests @@ -52,21 +50,26 @@ class PolicyTestCase(test_v3.RestfulTestCase): def test_get_head_policy(self): """Call ``GET & HEAD /policies/{policy_id}``.""" - resource_url = ('/policies/%(policy_id)s' % - {'policy_id': self.policy_id}) + resource_url = '/policies/%(policy_id)s' % { + 'policy_id': self.policy_id + } r = self.get(resource_url) self.assertValidPolicyResponse(r, self.policy) self.head(resource_url, expected_status=http.client.OK) def test_update_policy(self): """Call ``PATCH /policies/{policy_id}``.""" - self.policy['blob'] = json.dumps({'data': uuid.uuid4().hex, }) + self.policy['blob'] = json.dumps( + { + 'data': uuid.uuid4().hex, + } + ) r = self.patch( '/policies/%(policy_id)s' % {'policy_id': self.policy_id}, - body={'policy': self.policy}) + body={'policy': self.policy}, + ) self.assertValidPolicyResponse(r, self.policy) def test_delete_policy(self): """Call ``DELETE /policies/{policy_id}``.""" - self.delete( - '/policies/%(policy_id)s' % {'policy_id': self.policy_id}) + self.delete('/policies/%(policy_id)s' % {'policy_id': self.policy_id}) diff --git 
a/keystone/tests/unit/test_v3_resource.py b/keystone/tests/unit/test_v3_resource.py index a8abb05d42..1bc4e669f1 100644 --- a/keystone/tests/unit/test_v3_resource.py +++ b/keystone/tests/unit/test_v3_resource.py @@ -29,8 +29,7 @@ CONF = keystone.conf.CONF PROVIDERS = provider_api.ProviderAPIs -class ResourceTestCase(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin): +class ResourceTestCase(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin): """Test domains and projects.""" def setUp(self): @@ -39,7 +38,7 @@ class ResourceTestCase(test_v3.RestfulTestCase, ksfixtures.KeyRepository( self.config_fixture, 'credential', - credential_fernet.MAX_ACTIVE_KEYS + credential_fernet.MAX_ACTIVE_KEYS, ) ) @@ -48,9 +47,7 @@ class ResourceTestCase(test_v3.RestfulTestCase, def test_create_domain(self): """Call ``POST /domains``.""" ref = unit.new_domain_ref() - r = self.post( - '/domains', - body={'domain': ref}) + r = self.post('/domains', body={'domain': ref}) return self.assertValidDomainResponse(r, ref) def test_create_domain_case_sensitivity(self): @@ -59,42 +56,42 @@ class ResourceTestCase(test_v3.RestfulTestCase, # ensure the name is lowercase ref['name'] = ref['name'].lower() - r = self.post( - '/domains', - body={'domain': ref}) + r = self.post('/domains', body={'domain': ref}) self.assertValidDomainResponse(r, ref) # ensure the name is uppercase ref['name'] = ref['name'].upper() - r = self.post( - '/domains', - body={'domain': ref}) + r = self.post('/domains', body={'domain': ref}) self.assertValidDomainResponse(r, ref) def test_create_domain_bad_request(self): """Call ``POST /domains``.""" - self.post('/domains', body={'domain': {}}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/domains', + body={'domain': {}}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_domain_unsafe(self): """Call ``POST /domains with unsafe names``.""" unsafe_name = 'i am not / safe' - self.config_fixture.config(group='resource', - domain_name_url_safe='off') + 
self.config_fixture.config( + group='resource', domain_name_url_safe='off' + ) ref = unit.new_domain_ref(name=unsafe_name) - self.post( - '/domains', - body={'domain': ref}) + self.post('/domains', body={'domain': ref}) for config_setting in ['new', 'strict']: - self.config_fixture.config(group='resource', - domain_name_url_safe=config_setting) + self.config_fixture.config( + group='resource', domain_name_url_safe=config_setting + ) ref = unit.new_domain_ref(name=unsafe_name) self.post( '/domains', body={'domain': ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_create_domain_unsafe_default(self): """Check default for unsafe names for ``POST /domains``.""" @@ -102,9 +99,7 @@ class ResourceTestCase(test_v3.RestfulTestCase, # By default, we should be able to create unsafe names ref = unit.new_domain_ref(name=unsafe_name) - self.post( - '/domains', - body={'domain': ref}) + self.post('/domains', body={'domain': ref}) def test_create_domain_creates_is_domain_project(self): """Check a project that acts as a domain is created. 
@@ -117,8 +112,10 @@ class ResourceTestCase(test_v3.RestfulTestCase, self.assertValidDomainResponse(r, domain_ref) # Retrieve its correspondent project - r = self.get('/projects/%(project_id)s' % { - 'project_id': r.result['domain']['id']}) + r = self.get( + '/projects/%(project_id)s' + % {'project_id': r.result['domain']['id']} + ) self.assertValidProjectResponse(r) # The created project has is_domain flag as True @@ -136,8 +133,9 @@ class ResourceTestCase(test_v3.RestfulTestCase, self.assertValidProjectResponse(r) # Retrieve its correspondent domain - r = self.get('/domains/%(domain_id)s' % { - 'domain_id': r.result['project']['id']}) + r = self.get( + '/domains/%(domain_id)s' % {'domain_id': r.result['project']['id']} + ) self.assertValidDomainResponse(r) self.assertIsNotNone(r.result['domain']) @@ -147,13 +145,12 @@ class ResourceTestCase(test_v3.RestfulTestCase, explicit_domain_id = '9aea63518f0040c6b4518d8d2242911c' ref['explicit_domain_id'] = explicit_domain_id - r = self.post( - '/domains', - body={'domain': ref}) + r = self.post('/domains', body={'domain': ref}) self.assertValidDomainResponse(r, ref) - r = self.get('/domains/%(domain_id)s' % { - 'domain_id': explicit_domain_id}) + r = self.get( + '/domains/%(domain_id)s' % {'domain_id': explicit_domain_id} + ) self.assertValidDomainResponse(r) self.assertIsNotNone(r.result['domain']) @@ -163,38 +160,43 @@ class ResourceTestCase(test_v3.RestfulTestCase, explicit_domain_id = '9aea63518f0040c6b4518d8d2242911c' ref['explicit_domain_id'] = explicit_domain_id - r = self.post( - '/domains', - body={'domain': ref}) + r = self.post('/domains', body={'domain': ref}) self.assertValidDomainResponse(r, ref) # second one should fail r = self.post( '/domains', body={'domain': ref}, - expected_status=http.client.CONFLICT) + expected_status=http.client.CONFLICT, + ) def test_create_domain_invalid_explicit_ids(self): """Call ``POST /domains`` with various invalid explicit_domain_ids.""" ref = unit.new_domain_ref() - bad_ids 
= ['bad!', - '', - '9aea63518f0040c', - '1234567890123456789012345678901234567890', - '9aea63518f0040c6b4518d8d2242911c9aea63518f0040c6b45'] + bad_ids = [ + 'bad!', + '', + '9aea63518f0040c', + '1234567890123456789012345678901234567890', + '9aea63518f0040c6b4518d8d2242911c9aea63518f0040c6b45', + ] for explicit_domain_id in bad_ids: ref['explicit_domain_id'] = explicit_domain_id - self.post('/domains', body={'domain': {}}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/domains', + body={'domain': {}}, + expected_status=http.client.BAD_REQUEST, + ) def test_list_head_domains(self): """Call ``GET & HEAD /domains``.""" resource_url = '/domains' r = self.get(resource_url) - self.assertValidDomainListResponse(r, ref=self.domain, - resource_url=resource_url) + self.assertValidDomainListResponse( + r, ref=self.domain, resource_url=resource_url + ) self.head(resource_url, expected_status=http.client.OK) def test_list_limit_for_domains(self): @@ -212,8 +214,7 @@ class ResourceTestCase(test_v3.RestfulTestCase, def test_get_head_domain(self): """Call ``GET /domains/{domain_id}``.""" - resource_url = '/domains/%(domain_id)s' % { - 'domain_id': self.domain_id} + resource_url = '/domains/%(domain_id)s' % {'domain_id': self.domain_id} r = self.get(resource_url) self.assertValidDomainResponse(r, self.domain) self.head(resource_url, expected_status=http.client.OK) @@ -222,33 +223,38 @@ class ResourceTestCase(test_v3.RestfulTestCase, """Call ``PATCH /domains/{domain_id}``.""" ref = unit.new_domain_ref() del ref['id'] - r = self.patch('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}, - body={'domain': ref}) + r = self.patch( + '/domains/%(domain_id)s' % {'domain_id': self.domain_id}, + body={'domain': ref}, + ) self.assertValidDomainResponse(r, ref) def test_update_domain_unsafe(self): """Call ``POST /domains/{domain_id} with unsafe names``.""" unsafe_name = 'i am not / safe' - self.config_fixture.config(group='resource', - domain_name_url_safe='off') + 
self.config_fixture.config( + group='resource', domain_name_url_safe='off' + ) ref = unit.new_domain_ref(name=unsafe_name) del ref['id'] - self.patch('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}, - body={'domain': ref}) + self.patch( + '/domains/%(domain_id)s' % {'domain_id': self.domain_id}, + body={'domain': ref}, + ) unsafe_name = 'i am still not / safe' for config_setting in ['new', 'strict']: - self.config_fixture.config(group='resource', - domain_name_url_safe=config_setting) + self.config_fixture.config( + group='resource', domain_name_url_safe=config_setting + ) ref = unit.new_domain_ref(name=unsafe_name) del ref['id'] - self.patch('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}, + self.patch( + '/domains/%(domain_id)s' % {'domain_id': self.domain_id}, body={'domain': ref}, - expected_status=http.client.BAD_REQUEST) + expected_status=http.client.BAD_REQUEST, + ) def test_update_domain_unsafe_default(self): """Check default for unsafe names for ``POST /domains``.""" @@ -257,9 +263,10 @@ class ResourceTestCase(test_v3.RestfulTestCase, # By default, we should be able to create unsafe names ref = unit.new_domain_ref(name=unsafe_name) del ref['id'] - self.patch('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}, - body={'domain': ref}) + self.patch( + '/domains/%(domain_id)s' % {'domain_id': self.domain_id}, + body={'domain': ref}, + ) def test_update_domain_updates_is_domain_project(self): """Check the project that acts as a domain is updated. 
@@ -272,12 +279,16 @@ class ResourceTestCase(test_v3.RestfulTestCase, self.assertValidDomainResponse(r, domain_ref) # Disable it - self.patch('/domains/%s' % r.result['domain']['id'], - body={'domain': {'enabled': False}}) + self.patch( + '/domains/%s' % r.result['domain']['id'], + body={'domain': {'enabled': False}}, + ) # Retrieve its correspondent project - r = self.get('/projects/%(project_id)s' % { - 'project_id': r.result['domain']['id']}) + r = self.get( + '/projects/%(project_id)s' + % {'project_id': r.result['domain']['id']} + ) self.assertValidProjectResponse(r) # The created project is disabled as well @@ -292,9 +303,11 @@ class ResourceTestCase(test_v3.RestfulTestCase, project2 = unit.new_project_ref(domain_id=domain2['id']) PROVIDERS.resource_api.create_project(project2['id'], project2) - user2 = unit.create_user(PROVIDERS.identity_api, - domain_id=domain2['id'], - project_id=project2['id']) + user2 = unit.create_user( + PROVIDERS.identity_api, + domain_id=domain2['id'], + project_id=project2['id'], + ) role_member = unit.new_role_ref() PROVIDERS.role_api.create_role(role_member['id'], role_member) @@ -307,38 +320,45 @@ class ResourceTestCase(test_v3.RestfulTestCase, auth_data = self.build_authentication_request( user_id=user2['id'], password=user2['password'], - project_id=project2['id']) + project_id=project2['id'], + ) self.v3_create_token(auth_data) # Now disable the domain domain2['enabled'] = False - r = self.patch('/domains/%(domain_id)s' % { - 'domain_id': domain2['id']}, - body={'domain': {'enabled': False}}) + r = self.patch( + '/domains/%(domain_id)s' % {'domain_id': domain2['id']}, + body={'domain': {'enabled': False}}, + ) self.assertValidDomainResponse(r, domain2) # Try looking up in v3 by name and id auth_data = self.build_authentication_request( user_id=user2['id'], password=user2['password'], - project_id=project2['id']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + project_id=project2['id'], + ) + 
self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) auth_data = self.build_authentication_request( username=user2['name'], user_domain_id=domain2['id'], password=user2['password'], - project_id=project2['id']) - self.v3_create_token(auth_data, - expected_status=http.client.UNAUTHORIZED) + project_id=project2['id'], + ) + self.v3_create_token( + auth_data, expected_status=http.client.UNAUTHORIZED + ) def test_delete_enabled_domain_fails(self): """Call ``DELETE /domains/{domain_id}`` (when domain enabled).""" # Try deleting an enabled domain, which should fail - self.delete('/domains/%(domain_id)s' % { - 'domain_id': self.domain['id']}, - expected_status=exception.ForbiddenAction.code) + self.delete( + '/domains/%(domain_id)s' % {'domain_id': self.domain['id']}, + expected_status=exception.ForbiddenAction.code, + ) def test_delete_domain(self): """Call ``DELETE /domains/{domain_id}``. @@ -364,8 +384,9 @@ class ResourceTestCase(test_v3.RestfulTestCase, group = unit.new_group_ref(domain_id=self.domain_id) group = PROVIDERS.identity_api.create_group(group) - credential = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id) + credential = unit.new_credential_ref( + user_id=self.user['id'], project_id=self.project_id + ) PROVIDERS.credential_api.create_credential( credential['id'], credential ) @@ -379,43 +400,56 @@ class ResourceTestCase(test_v3.RestfulTestCase, project2['id'], project2 ) - user2 = unit.new_user_ref(domain_id=domain2['id'], - project_id=project2['id']) + user2 = unit.new_user_ref( + domain_id=domain2['id'], project_id=project2['id'] + ) user2 = PROVIDERS.identity_api.create_user(user2) group2 = unit.new_group_ref(domain_id=domain2['id']) group2 = PROVIDERS.identity_api.create_group(group2) - credential2 = unit.new_credential_ref(user_id=user2['id'], - project_id=project2['id']) + credential2 = unit.new_credential_ref( + user_id=user2['id'], project_id=project2['id'] + ) 
PROVIDERS.credential_api.create_credential( credential2['id'], credential2 ) # Now disable the new domain and delete it domain2['enabled'] = False - r = self.patch('/domains/%(domain_id)s' % { - 'domain_id': domain2['id']}, - body={'domain': {'enabled': False}}) + r = self.patch( + '/domains/%(domain_id)s' % {'domain_id': domain2['id']}, + body={'domain': {'enabled': False}}, + ) self.assertValidDomainResponse(r, domain2) self.delete('/domains/%(domain_id)s' % {'domain_id': domain2['id']}) # Check all the domain2 relevant entities are gone - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain, - domain2['id']) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - project2['id']) - self.assertRaises(exception.GroupNotFound, - PROVIDERS.identity_api.get_group, - group2['id']) - self.assertRaises(exception.UserNotFound, - PROVIDERS.identity_api.get_user, - user2['id']) - self.assertRaises(exception.CredentialNotFound, - PROVIDERS.credential_api.get_credential, - credential2['id']) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain, + domain2['id'], + ) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + project2['id'], + ) + self.assertRaises( + exception.GroupNotFound, + PROVIDERS.identity_api.get_group, + group2['id'], + ) + self.assertRaises( + exception.UserNotFound, + PROVIDERS.identity_api.get_user, + user2['id'], + ) + self.assertRaises( + exception.CredentialNotFound, + PROVIDERS.credential_api.get_credential, + credential2['id'], + ) # ...and that all self.domain entities are still here r = PROVIDERS.resource_api.get_domain(self.domain['id']) @@ -437,18 +471,22 @@ class ResourceTestCase(test_v3.RestfulTestCase, self.assertValidDomainResponse(r, domain_ref) domain_id = r.result['domain']['id'] # Create a Idp in the domain - self.put('/OS-FEDERATION/identity_providers/test_idp', - body={"identity_provider": { - "domain_id": 
domain_id}}, - expected_status=http.client.CREATED) + self.put( + '/OS-FEDERATION/identity_providers/test_idp', + body={"identity_provider": {"domain_id": domain_id}}, + expected_status=http.client.CREATED, + ) # Disable and delete the domain with no error. - self.patch('/domains/%(domain_id)s' % { - 'domain_id': domain_id}, - body={'domain': {'enabled': False}}) + self.patch( + '/domains/%(domain_id)s' % {'domain_id': domain_id}, + body={'domain': {'enabled': False}}, + ) self.delete('/domains/%s' % domain_id) # The Idp is deleted as well - self.get('/OS-FEDERATION/identity_providers/test_idp', - expected_status=http.client.NOT_FOUND) + self.get( + '/OS-FEDERATION/identity_providers/test_idp', + expected_status=http.client.NOT_FOUND, + ) def test_delete_domain_deletes_is_domain_project(self): """Check the project that acts as a domain is deleted. @@ -461,27 +499,37 @@ class ResourceTestCase(test_v3.RestfulTestCase, self.assertValidDomainResponse(r, domain_ref) # Retrieve its correspondent project - self.get('/projects/%(project_id)s' % { - 'project_id': r.result['domain']['id']}) + self.get( + '/projects/%(project_id)s' + % {'project_id': r.result['domain']['id']} + ) # Delete the domain - self.patch('/domains/%s' % r.result['domain']['id'], - body={'domain': {'enabled': False}}) + self.patch( + '/domains/%s' % r.result['domain']['id'], + body={'domain': {'enabled': False}}, + ) self.delete('/domains/%s' % r.result['domain']['id']) # The created project is deleted as well - self.get('/projects/%(project_id)s' % { - 'project_id': r.result['domain']['id']}, expected_status=404) + self.get( + '/projects/%(project_id)s' + % {'project_id': r.result['domain']['id']}, + expected_status=404, + ) def test_delete_default_domain(self): # Need to disable it first. 
- self.patch('/domains/%(domain_id)s' % { - 'domain_id': CONF.identity.default_domain_id}, - body={'domain': {'enabled': False}}) + self.patch( + '/domains/%(domain_id)s' + % {'domain_id': CONF.identity.default_domain_id}, + body={'domain': {'enabled': False}}, + ) self.delete( - '/domains/%(domain_id)s' % { - 'domain_id': CONF.identity.default_domain_id}) + '/domains/%(domain_id)s' + % {'domain_id': CONF.identity.default_domain_id} + ) def test_token_revoked_once_domain_disabled(self): """Test token from a disabled domain has been invalidated. @@ -493,13 +541,14 @@ class ResourceTestCase(test_v3.RestfulTestCase, domain = unit.new_domain_ref() PROVIDERS.resource_api.create_domain(domain['id'], domain) - user2 = unit.create_user(PROVIDERS.identity_api, - domain_id=domain['id']) + user2 = unit.create_user( + PROVIDERS.identity_api, domain_id=domain['id'] + ) # build a request body auth_body = self.build_authentication_request( - user_id=user2['id'], - password=user2['password']) + user_id=user2['id'], password=user2['password'] + ) # sends a request for the user's token token_resp = self.post('/auth/tokens', body=auth_body) @@ -507,21 +556,24 @@ class ResourceTestCase(test_v3.RestfulTestCase, subject_token = token_resp.headers.get('x-subject-token') # validates the returned token and it should be valid. - self.head('/auth/tokens', - headers={'x-subject-token': subject_token}, - expected_status=http.client.OK) + self.head( + '/auth/tokens', + headers={'x-subject-token': subject_token}, + expected_status=http.client.OK, + ) # now disable the domain domain['enabled'] = False url = "/domains/%(domain_id)s" % {'domain_id': domain['id']} - self.patch(url, - body={'domain': {'enabled': False}}) + self.patch(url, body={'domain': {'enabled': False}}) # validates the same token again and it should be 'not found' # as the domain has already been disabled. 
- self.head('/auth/tokens', - headers={'x-subject-token': subject_token}, - expected_status=http.client.NOT_FOUND) + self.head( + '/auth/tokens', + headers={'x-subject-token': subject_token}, + expected_status=http.client.NOT_FOUND, + ) def test_delete_domain_hierarchy(self): """Call ``DELETE /domains/{domain_id}``.""" @@ -534,30 +586,35 @@ class ResourceTestCase(test_v3.RestfulTestCase, ) leaf_project = unit.new_project_ref( - domain_id=domain['id'], - parent_id=root_project['id']) + domain_id=domain['id'], parent_id=root_project['id'] + ) PROVIDERS.resource_api.create_project(leaf_project['id'], leaf_project) # Need to disable it first. - self.patch('/domains/%(domain_id)s' % { - 'domain_id': domain['id']}, - body={'domain': {'enabled': False}}) + self.patch( + '/domains/%(domain_id)s' % {'domain_id': domain['id']}, + body={'domain': {'enabled': False}}, + ) - self.delete( - '/domains/%(domain_id)s' % { - 'domain_id': domain['id']}) + self.delete('/domains/%(domain_id)s' % {'domain_id': domain['id']}) - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.get_domain, - domain['id']) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.get_domain, + domain['id'], + ) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - root_project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + root_project['id'], + ) - self.assertRaises(exception.ProjectNotFound, - PROVIDERS.resource_api.get_project, - leaf_project['id']) + self.assertRaises( + exception.ProjectNotFound, + PROVIDERS.resource_api.get_project, + leaf_project['id'], + ) def test_forbid_operations_on_federated_domain(self): """Make sure one cannot operate on federated domain. @@ -567,36 +624,57 @@ class ResourceTestCase(test_v3.RestfulTestCase, id 'Federated' are used. 
""" + def create_domains(): - for variation in ('Federated', 'FEDERATED', - 'federated', 'fEderated'): + for variation in ( + 'Federated', + 'FEDERATED', + 'federated', + 'fEderated', + ): domain = unit.new_domain_ref() domain['id'] = variation yield domain for domain in create_domains(): self.assertRaises( - AssertionError, PROVIDERS.resource_api.create_domain, - domain['id'], domain) + AssertionError, + PROVIDERS.resource_api.create_domain, + domain['id'], + domain, + ) self.assertRaises( - AssertionError, PROVIDERS.resource_api.update_domain, - domain['id'], domain) + AssertionError, + PROVIDERS.resource_api.update_domain, + domain['id'], + domain, + ) self.assertRaises( - exception.DomainNotFound, PROVIDERS.resource_api.delete_domain, - domain['id']) + exception.DomainNotFound, + PROVIDERS.resource_api.delete_domain, + domain['id'], + ) # swap 'name' with 'id' and try again, expecting the request to # gracefully fail domain['id'], domain['name'] = domain['name'], domain['id'] self.assertRaises( - AssertionError, PROVIDERS.resource_api.create_domain, - domain['id'], domain) + AssertionError, + PROVIDERS.resource_api.create_domain, + domain['id'], + domain, + ) self.assertRaises( - AssertionError, PROVIDERS.resource_api.update_domain, - domain['id'], domain) + AssertionError, + PROVIDERS.resource_api.update_domain, + domain['id'], + domain, + ) self.assertRaises( - exception.DomainNotFound, PROVIDERS.resource_api.delete_domain, - domain['id']) + exception.DomainNotFound, + PROVIDERS.resource_api.delete_domain, + domain['id'], + ) def test_forbid_operations_on_defined_federated_domain(self): """Make sure one cannot operate on a user-defined federated domain. 
@@ -605,18 +683,27 @@ class ResourceTestCase(test_v3.RestfulTestCase, """ non_default_name = 'beta_federated_domain' - self.config_fixture.config(group='federation', - federated_domain_name=non_default_name) + self.config_fixture.config( + group='federation', federated_domain_name=non_default_name + ) domain = unit.new_domain_ref(name=non_default_name) - self.assertRaises(AssertionError, - PROVIDERS.resource_api.create_domain, - domain['id'], domain) - self.assertRaises(exception.DomainNotFound, - PROVIDERS.resource_api.delete_domain, - domain['id']) - self.assertRaises(AssertionError, - PROVIDERS.resource_api.update_domain, - domain['id'], domain) + self.assertRaises( + AssertionError, + PROVIDERS.resource_api.create_domain, + domain['id'], + domain, + ) + self.assertRaises( + exception.DomainNotFound, + PROVIDERS.resource_api.delete_domain, + domain['id'], + ) + self.assertRaises( + AssertionError, + PROVIDERS.resource_api.update_domain, + domain['id'], + domain, + ) # Project CRUD tests @@ -624,48 +711,54 @@ class ResourceTestCase(test_v3.RestfulTestCase, """Call ``GET & HEAD /projects``.""" resource_url = '/projects' r = self.get(resource_url) - self.assertValidProjectListResponse(r, ref=self.project, - resource_url=resource_url) + self.assertValidProjectListResponse( + r, ref=self.project, resource_url=resource_url + ) self.head(resource_url, expected_status=http.client.OK) def test_create_project(self): """Call ``POST /projects``.""" ref = unit.new_project_ref(domain_id=self.domain_id) - r = self.post( - '/projects', - body={'project': ref}) + r = self.post('/projects', body={'project': ref}) self.assertValidProjectResponse(r, ref) def test_create_project_bad_request(self): """Call ``POST /projects``.""" - self.post('/projects', body={'project': {}}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/projects', + body={'project': {}}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_project_invalid_domain_id(self): """Call ``POST 
/projects``.""" ref = unit.new_project_ref(domain_id=uuid.uuid4().hex) - self.post('/projects', body={'project': ref}, - expected_status=http.client.BAD_REQUEST) + self.post( + '/projects', + body={'project': ref}, + expected_status=http.client.BAD_REQUEST, + ) def test_create_project_unsafe(self): """Call ``POST /projects with unsafe names``.""" unsafe_name = 'i am not / safe' - self.config_fixture.config(group='resource', - project_name_url_safe='off') + self.config_fixture.config( + group='resource', project_name_url_safe='off' + ) ref = unit.new_project_ref(name=unsafe_name) - self.post( - '/projects',