Merge "Keystone to honor the "domain" attribute mapping rules."
commit 993e589fa1
@@ -75,6 +75,11 @@ A mapping looks as follows:
* `<user>`: the local user that will be mapped to the federated user.
* `<group>`: (optional) the local groups the federated user will be placed in.
* `<projects>`: (optional) the local projects mapped to the federated user.
* `<domain>`: (optional) the local domain mapped to the federated user,
  projects, and groups. Projects and groups can also override this default
  domain by defining a domain of their own. Moreover, if no domain is
  defined in this configuration, the attribute mapping schema will use the
  identity provider's OpenStack domain (see the sketch after this list).

* `remote`: a JSON object containing information on what remote attributes will be mapped.
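For illustration only (not part of this change), a minimal schema-2.0 mapping that sets a default `domain` at the root of a `local` rule could look roughly like the following Python literal; the remote attribute type, domain name, and project name are placeholders:

```python
# Hypothetical mapping payload; only the structure follows the documented
# schema, the concrete names are illustrative.
mapping = {
    "schema_version": "2.0",
    "rules": [
        {
            "remote": [{"type": "OIDC-preferred_username"}],
            "local": [
                {
                    # Default domain for the user and the projects below.
                    "domain": {"name": "example-domain"},
                    "user": {"name": "{0}"},
                    "projects": [
                        {"name": "federated-project",
                         "roles": [{"name": "member"}]}
                    ],
                }
            ],
        }
    ],
}
```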
@@ -847,7 +852,8 @@ It is important to note the following constraints apply when auto-provisioning:

* Projects are the only resource that will be created dynamically.
* Projects will be created within the domain associated with the Identity
  Provider.
  Provider or the domain mapped via the attribute mapping
  (`federation_attribute_mapping_schema_version >= 2.0`).
* The ``projects`` section of the mapping must also contain a ``roles``
  section.

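To make the last two constraints concrete, a hypothetical ``projects`` entry that satisfies the mandatory ``roles`` section and also overrides the mapping-level domain might look like this (all names are placeholders, not values from this change):

```python
# Hypothetical project entry inside the "local" section of a 2.0 mapping;
# "domain" overrides the mapping-level default, "roles" is mandatory.
project_entry = {
    "name": "observability",
    "domain": {"name": "operations-domain"},  # per-project override
    "roles": [{"name": "member"}, {"name": "reader"}],
}
```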
@@ -17,6 +17,8 @@ import flask_restful
import http.client

from oslo_serialization import jsonutils

from oslo_log import log

from keystone.api._shared import authentication
from keystone.api._shared import json_home_relations
from keystone.common import provider_api
@@ -30,6 +32,7 @@ from keystone.federation import utils
from keystone.server import flask as ks_flask


LOG = log.getLogger(__name__)
CONF = keystone.conf.CONF
ENFORCER = rbac_enforcer.RBACEnforcer
PROVIDERS = provider_api.ProviderAPIs
@@ -272,30 +275,50 @@ class MappingResource(_ResourceBase):
        ENFORCER.enforce_call(action='identity:list_mappings')
        return self.wrap_collection(PROVIDERS.federation_api.list_mappings())

    def _internal_normalize_and_validate_attribute_mapping(
            self, action_executed_message="created"):
        mapping = self.request_body_json.get('mapping', {})
        mapping = self._normalize_dict(mapping)

        if not mapping.get('schema_version'):
            default_schema_version =\
                utils.get_default_attribute_mapping_schema_version()
            LOG.debug("A mapping [%s] was %s without providing a "
                      "'schema_version'; therefore, we need to set one. The "
                      "current default is [%s]. We will use this value for "
                      "the attribute mapping being registered. It is "
                      "recommended that one does not rely on this default "
                      "value, as it can change, and the already persisted "
                      "attribute mappings will remain with the previous "
                      "default values.", mapping, action_executed_message,
                      default_schema_version)
            mapping['schema_version'] = default_schema_version
        utils.validate_mapping_structure(mapping)
        return mapping

    def put(self, mapping_id):
        """Create a mapping.

        PUT /OS-FEDERATION/mappings/{mapping_id}
        """
        ENFORCER.enforce_call(action='identity:create_mapping')
        mapping = self.request_body_json.get('mapping', {})
        mapping = self._normalize_dict(mapping)
        utils.validate_mapping_structure(mapping)
        mapping_ref = PROVIDERS.federation_api.create_mapping(
            mapping_id, mapping)

        am = self._internal_normalize_and_validate_attribute_mapping(
            "registered")
        mapping_ref = PROVIDERS.federation_api.create_mapping(mapping_id, am)

        return self.wrap_member(mapping_ref), http.client.CREATED

    def patch(self, mapping_id):
        """Update a mapping.
        """Update an attribute mapping for identity federation.

        PATCH /OS-FEDERATION/mappings/{mapping_id}
        """
        ENFORCER.enforce_call(action='identity:update_mapping')
        mapping = self.request_body_json.get('mapping', {})
        mapping = self._normalize_dict(mapping)
        utils.validate_mapping_structure(mapping)
        mapping_ref = PROVIDERS.federation_api.update_mapping(
            mapping_id, mapping)

        am = self._internal_normalize_and_validate_attribute_mapping("updated")
        mapping_ref = PROVIDERS.federation_api.update_mapping(mapping_id, am)

        return self.wrap_member(mapping_ref)

    def delete(self, mapping_id):

@ -106,6 +106,60 @@ def handle_scoped_token(token, federation_api, identity_api):
|
||||
return response_data
|
||||
|
||||
|
||||
def configure_project_domain(shadow_project, idp_domain_id,
|
||||
resource_api):
|
||||
"""Configure federated projects domain.
|
||||
|
||||
We set the domain to be the default (idp_domain_id) if the project
|
||||
from the attribute mapping comes without a domain.
|
||||
"""
|
||||
LOG.debug('Processing domain for project: %s', shadow_project)
|
||||
domain = shadow_project.get('domain', {"id": idp_domain_id})
|
||||
if 'id' not in domain:
|
||||
db_domain = resource_api.get_domain_by_name(domain['name'])
|
||||
domain = {"id": db_domain.get('id')}
|
||||
shadow_project['domain'] = domain
|
||||
LOG.debug('Project [%s] domain ID was resolved to [%s]',
|
||||
shadow_project['name'], shadow_project['domain']['id'])
|
||||
|
||||
|
||||
def handle_projects_from_mapping(shadow_projects, idp_domain_id,
|
||||
existing_roles, user, assignment_api,
|
||||
resource_api):
|
||||
for shadow_project in shadow_projects:
|
||||
configure_project_domain(
|
||||
shadow_project, idp_domain_id, resource_api)
|
||||
try:
|
||||
# Check and see if the project already exists and if it
|
||||
# does not, try to create it.
|
||||
project = resource_api.get_project_by_name(
|
||||
shadow_project['name'], shadow_project['domain']['id']
|
||||
)
|
||||
except exception.ProjectNotFound:
|
||||
LOG.info(
|
||||
'Project %(project_name)s does not exist. It will be '
|
||||
'automatically provisioning for user %(user_id)s.',
|
||||
{'project_name': shadow_project['name'],
|
||||
'user_id': user['id']}
|
||||
)
|
||||
project_ref = {
|
||||
'id': uuid.uuid4().hex,
|
||||
'name': shadow_project['name'],
|
||||
'domain_id': shadow_project['domain']['id']
|
||||
}
|
||||
project = resource_api.create_project(
|
||||
project_ref['id'],
|
||||
project_ref
|
||||
)
|
||||
shadow_roles = shadow_project['roles']
|
||||
for shadow_role in shadow_roles:
|
||||
assignment_api.create_grant(
|
||||
existing_roles[shadow_role['name']]['id'],
|
||||
user_id=user['id'],
|
||||
project_id=project['id']
|
||||
)
|
||||
|
||||
|
||||
def handle_unscoped_token(auth_payload, resource_api, federation_api,
|
||||
identity_api, assignment_api, role_api):
|
||||
|
||||
@ -141,41 +195,6 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api,
|
||||
identity_provider=idp_id
|
||||
)
|
||||
|
||||
def create_projects_from_mapping(shadow_projects, idp_domain_id,
|
||||
existing_roles, user, assignment_api,
|
||||
resource_api):
|
||||
for shadow_project in shadow_projects:
|
||||
try:
|
||||
# Check and see if the project already exists and if it
|
||||
# does not, try to create it.
|
||||
project = resource_api.get_project_by_name(
|
||||
shadow_project['name'], idp_domain_id
|
||||
)
|
||||
except exception.ProjectNotFound:
|
||||
LOG.info(
|
||||
'Project %(project_name)s does not exist. It will be '
|
||||
'automatically provisioning for user %(user_id)s.',
|
||||
{'project_name': shadow_project['name'],
|
||||
'user_id': user['id']}
|
||||
)
|
||||
project_ref = {
|
||||
'id': uuid.uuid4().hex,
|
||||
'name': shadow_project['name'],
|
||||
'domain_id': idp_domain_id
|
||||
}
|
||||
project = resource_api.create_project(
|
||||
project_ref['id'],
|
||||
project_ref
|
||||
)
|
||||
|
||||
shadow_roles = shadow_project['roles']
|
||||
for shadow_role in shadow_roles:
|
||||
assignment_api.create_grant(
|
||||
existing_roles[shadow_role['name']]['id'],
|
||||
user_id=user['id'],
|
||||
project_id=project['id']
|
||||
)
|
||||
|
||||
def is_ephemeral_user(mapped_properties):
|
||||
return mapped_properties['user']['type'] == utils.UserType.EPHEMERAL
|
||||
|
||||
@ -231,21 +250,19 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api,
|
||||
raise exception.Unauthorized(e)
|
||||
|
||||
if is_ephemeral_user(mapped_properties):
|
||||
unique_id, display_name = (
|
||||
get_user_unique_id_and_display_name(mapped_properties)
|
||||
)
|
||||
email = mapped_properties['user'].get('email')
|
||||
idp_domain_id = federation_api.get_idp(
|
||||
identity_provider)['domain_id']
|
||||
|
||||
validate_and_prepare_federated_user(mapped_properties,
|
||||
idp_domain_id, resource_api)
|
||||
|
||||
user = identity_api.shadow_federated_user(
|
||||
identity_provider,
|
||||
protocol, unique_id,
|
||||
display_name,
|
||||
email,
|
||||
protocol, mapped_properties['user'],
|
||||
group_ids=mapped_properties['group_ids'])
|
||||
|
||||
if 'projects' in mapped_properties:
|
||||
idp_domain_id = federation_api.get_idp(
|
||||
identity_provider
|
||||
)['domain_id']
|
||||
|
||||
existing_roles = {
|
||||
role['name']: role for role in role_api.list_roles()
|
||||
}
|
||||
@ -260,7 +277,7 @@ def handle_unscoped_token(auth_payload, resource_api, federation_api,
|
||||
idp_domain_id,
|
||||
identity_provider
|
||||
)
|
||||
create_projects_from_mapping(
|
||||
handle_projects_from_mapping(
|
||||
mapped_properties['projects'],
|
||||
idp_domain_id,
|
||||
existing_roles,
|
||||
@ -327,7 +344,8 @@ def apply_mapping_filter(identity_provider, protocol, assertion,
|
||||
return mapped_properties, mapping_id
|
||||
|
||||
|
||||
def get_user_unique_id_and_display_name(mapped_properties):
|
||||
def validate_and_prepare_federated_user(
|
||||
mapped_properties, idp_domain_id, resource_api):
|
||||
"""Setup federated username.
|
||||
|
||||
Function covers all the cases for properly setting user id, a primary
|
||||
@ -343,9 +361,18 @@ def get_user_unique_id_and_display_name(mapped_properties):
|
||||
3) If user_id is not set and user_name is, set user_id as url safe version
|
||||
of user_name.
|
||||
|
||||
Furthermore, we set the IdP as the user domain, if the user definition
|
||||
does not come with a domain definition.
|
||||
|
||||
:param mapped_properties: Properties issued by a RuleProcessor.
|
||||
:type: dictionary
|
||||
|
||||
:param idp_domain_id: The domain ID of the IdP registered in OpenStack.
|
||||
:type: string
|
||||
|
||||
:param resource_api: The resource API used to access the database layer.
|
||||
:type: object
|
||||
|
||||
:raises keystone.exception.Unauthorized: If neither `user_name` nor
|
||||
`user_id` is set.
|
||||
:returns: tuple with user identification
|
||||
@ -372,4 +399,13 @@ def get_user_unique_id_and_display_name(mapped_properties):
|
||||
if user_name:
|
||||
user['name'] = user_name
|
||||
user['id'] = parse.quote(user_id)
|
||||
return (user['id'], user['name'])
|
||||
|
||||
LOG.debug('Processing domain for federated user: %s', user)
|
||||
domain = user.get('domain', {"id": idp_domain_id})
|
||||
if 'id' not in domain:
|
||||
db_domain = resource_api.get_domain_by_name(domain['name'])
|
||||
domain = {"id": db_domain.get('id')}
|
||||
|
||||
user['domain'] = domain
|
||||
LOG.debug('User [%s] domain ID was resolved to [%s]', user['name'],
|
||||
user['domain']['id'])
|
||||
|
@@ -40,6 +40,11 @@ from keystone.federation import utils as mapping_engine
from keystone.i18n import _
from keystone.server import backends

# We need to define the log level to INFO. Otherwise, when using the
# system, we will not be able to see anything.
log.set_defaults(default_log_levels="INFO")


CONF = keystone.conf.CONF
LOG = log.getLogger(__name__)

@ -1144,6 +1149,8 @@ class MappingEngineTester(BaseApp):
|
||||
raise SystemExit(_("Error while opening file "
|
||||
"%(path)s: %(err)s") % {'path': path, 'err': e})
|
||||
|
||||
LOG.debug("Assertions loaded: [%s].", self.assertion)
|
||||
|
||||
def normalize_assertion(self):
|
||||
def split(line, line_num):
|
||||
try:
|
||||
@ -1179,6 +1186,9 @@ class MappingEngineTester(BaseApp):
|
||||
def main(cls):
|
||||
if CONF.command.engine_debug:
|
||||
mapping_engine.LOG.logger.setLevel('DEBUG')
|
||||
LOG.logger.setLevel('DEBUG')
|
||||
|
||||
LOG.debug("Debug log level enabled!")
|
||||
else:
|
||||
mapping_engine.LOG.logger.setLevel('WARN')
|
||||
|
||||
@ -1186,7 +1196,23 @@ class MappingEngineTester(BaseApp):
|
||||
|
||||
tester.read_rules(CONF.command.rules)
|
||||
tester.normalize_rules()
|
||||
mapping_engine.validate_mapping_structure(tester.rules)
|
||||
|
||||
attribute_mapping = tester.rules.copy()
|
||||
if CONF.command.mapping_schema_version:
|
||||
attribute_mapping[
|
||||
'schema_version'] = CONF.command.mapping_schema_version
|
||||
|
||||
if not attribute_mapping.get('schema_version'):
|
||||
default_schema_version = '1.0'
|
||||
LOG.warning('No schema version defined in rules [%s]. Therefore,'
|
||||
'we will use the default as [%s].', attribute_mapping,
|
||||
default_schema_version)
|
||||
attribute_mapping[
|
||||
'schema_version'] = default_schema_version
|
||||
|
||||
LOG.info("Validating Attribute mapping rules [%s].", attribute_mapping)
|
||||
mapping_engine.validate_mapping_structure(attribute_mapping)
|
||||
LOG.info("Attribute mapping rules are valid.")
|
||||
|
||||
tester.read_assertion(CONF.command.input)
|
||||
tester.normalize_assertion()
|
||||
@ -1200,6 +1226,8 @@ class MappingEngineTester(BaseApp):
|
||||
rp = mapping_engine.RuleProcessor(tester.mapping_id,
|
||||
tester.rules['rules'])
|
||||
mapped = rp.process(tester.assertion)
|
||||
|
||||
LOG.info("Result of the attribute mapping processing.")
|
||||
print(jsonutils.dumps(mapped, indent=2))
|
||||
|
||||
@classmethod
|
||||
@@ -1234,6 +1262,11 @@ class MappingEngineTester(BaseApp):
                            default=False, action="store_true",
                            help=("Enable debug messages from the mapping "
                                  "engine."))
        parser.add_argument('--mapping-schema-version', default=None,
                            required=False,
                            help=("The override for the schema version of "
                                  "the rules that are loaded in the 'rules' "
                                  "option of the test CLI."))


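Assuming the subcommand's existing `--rules`, `--input`, and `--engine-debug` options, a hypothetical invocation that exercises the new override could look like the following (file names are placeholders):

```console
# Hypothetical usage; rules.json and assertion.txt are placeholder files.
keystone-manage mapping_engine --rules rules.json \
    --input assertion.txt --mapping-schema-version 2.0 --engine-debug
```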
class MappingPopulate(BaseApp):
|
||||
|
@ -0,0 +1,35 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Add Identity Federation attribute mapping schema version.
|
||||
|
||||
Revision ID: 47147121
|
||||
Revises: 11c3b243b4cb
|
||||
Create Date: 2023-12-12 09:00:00
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
|
||||
from sqlalchemy import Column
|
||||
from sqlalchemy import String
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '47147121'
|
||||
down_revision = '11c3b243b4cb'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.add_column("mapping", Column('schema_version',
|
||||
String(5), nullable=False,
|
||||
server_default="1.0"))
|
@ -1 +1 @@
|
||||
11c3b243b4cb
|
||||
47147121
|
||||
|
@ -39,6 +39,7 @@ CONTRACT_BRANCH = 'contract'
|
||||
RELEASES = (
|
||||
'yoga',
|
||||
'bobcat',
|
||||
'2024.01',
|
||||
)
|
||||
MILESTONES = (
|
||||
'yoga',
|
||||
|
@@ -102,6 +102,18 @@ Default time in minutes for the validity of group memberships carried over
from a mapping. Default is 0, which means disabled.
"""))

attribute_mapping_default_schema_version = cfg.StrOpt(
    'attribute_mapping_default_schema_version',
    default='1.0',
    help=utils.fmt("""
The attribute mapping default schema version to be used if the attribute
mapping being registered does not have a schema version. Bear in mind that
changing this value has no effect on attribute mappings that were registered
earlier, when another default value applied. Once registered, an attribute
mapping's schema version can only be changed via the mapping update API.
"""))


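As an illustrative (not prescriptive) `keystone.conf` snippet, an operator who wants newly registered mappings to default to the 2.0 schema could set:

```ini
[federation]
# New mappings registered without an explicit schema_version get this value.
attribute_mapping_default_schema_version = 2.0
```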
GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
@@ -113,6 +125,7 @@ ALL_OPTS = [
    sso_callback_template,
    caching,
    default_authorization_ttl,
    attribute_mapping_default_schema_version,
]
|
||||
|
||||
|
||||
|
@ -125,10 +125,12 @@ class IdPRemoteIdsModel(sql.ModelBase, sql.ModelDictMixin):
|
||||
|
||||
class MappingModel(sql.ModelBase, sql.ModelDictMixin):
|
||||
__tablename__ = 'mapping'
|
||||
attributes = ['id', 'rules']
|
||||
attributes = ['id', 'rules', 'schema_version']
|
||||
|
||||
id = sql.Column(sql.String(64), primary_key=True)
|
||||
rules = sql.Column(sql.JsonBlob(), nullable=False)
|
||||
schema_version = sql.Column(sql.String(5), nullable=False,
|
||||
server_default='1.0')
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, dictionary):
|
||||
@ -314,6 +316,7 @@ class Federation(base.FederationDriverBase):
|
||||
ref = {}
|
||||
ref['id'] = mapping_id
|
||||
ref['rules'] = mapping.get('rules')
|
||||
ref['schema_version'] = mapping.get('schema_version')
|
||||
with sql.session_for_write() as session:
|
||||
mapping_ref = MappingModel.from_dict(ref)
|
||||
session.add(mapping_ref)
|
||||
@ -339,6 +342,8 @@ class Federation(base.FederationDriverBase):
|
||||
ref = {}
|
||||
ref['id'] = mapping_id
|
||||
ref['rules'] = mapping.get('rules')
|
||||
if mapping.get('schema_version'):
|
||||
ref['schema_version'] = mapping.get('schema_version')
|
||||
with sql.session_for_write() as session:
|
||||
mapping_ref = self._get_mapping(session, mapping_id)
|
||||
old_mapping = mapping_ref.to_dict()
|
||||
|
@ -164,8 +164,10 @@ class Manager(manager.Manager):
|
||||
|
||||
def evaluate(self, idp_id, protocol_id, assertion_data):
|
||||
mapping = self.get_mapping_from_idp_and_protocol(idp_id, protocol_id)
|
||||
rules = mapping['rules']
|
||||
rule_processor = utils.RuleProcessor(mapping['id'], rules)
|
||||
|
||||
rule_processor = utils.create_attribute_mapping_rules_processor(
|
||||
mapping)
|
||||
|
||||
mapped_properties = rule_processor.process(assertion_data)
|
||||
return mapped_properties, mapping['id']
|
||||
|
||||
@ -176,18 +178,9 @@ class Manager(manager.Manager):
|
||||
def delete_protocol(self, idp_id, protocol_id):
|
||||
hints = driver_hints.Hints()
|
||||
hints.add_filter('protocol_id', protocol_id)
|
||||
shadow_users = PROVIDERS.shadow_users_api.list_federated_users_info(
|
||||
hints)
|
||||
|
||||
self.driver.delete_protocol(idp_id, protocol_id)
|
||||
|
||||
for shadow_user in shadow_users:
|
||||
PROVIDERS.identity_api._shadow_federated_user.invalidate(
|
||||
PROVIDERS.identity_api, shadow_user['idp_id'],
|
||||
shadow_user['protocol_id'], shadow_user['unique_id'],
|
||||
shadow_user['display_name'],
|
||||
shadow_user.get('extra', {}).get('email'))
|
||||
|
||||
def update_protocol(self, idp_id, protocol_id, protocol):
|
||||
self._validate_mapping_exists(protocol['mapping_id'])
|
||||
return self.driver.update_protocol(idp_id, protocol_id, protocol)
|
||||
|
@ -13,6 +13,7 @@
|
||||
"""Utilities for Federation Extension."""
|
||||
|
||||
import ast
|
||||
import copy
|
||||
import re
|
||||
|
||||
import flask
|
||||
@ -54,8 +55,20 @@ ROLE_PROPERTIES = {
|
||||
}
|
||||
}
|
||||
|
||||
PROJECTS_SCHEMA = {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["name", "roles"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"roles": ROLE_PROPERTIES
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
MAPPING_SCHEMA = {
|
||||
IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0 = {
|
||||
"type": "object",
|
||||
"required": ['rules'],
|
||||
"properties": {
|
||||
@ -90,18 +103,7 @@ MAPPING_SCHEMA = {
|
||||
},
|
||||
"additionalProperties": False
|
||||
},
|
||||
"projects": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["name", "roles"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"roles": ROLE_PROPERTIES
|
||||
}
|
||||
}
|
||||
},
|
||||
"projects": PROJECTS_SCHEMA,
|
||||
"group": {
|
||||
"type": "object",
|
||||
"oneOf": [
|
||||
@ -135,6 +137,9 @@ MAPPING_SCHEMA = {
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"schema_version": {
|
||||
"name": {"type": "string"}
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
@ -240,6 +245,22 @@ MAPPING_SCHEMA = {
|
||||
}
|
||||
}
|
||||
|
||||
# `IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0` adds the domain option for projects;
# the goal is to work in a similar fashion to the `user` and `groups`
# properties.
IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0 = copy.deepcopy(
    IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0)

PROJECTS_SCHEMA_2_0 = copy.deepcopy(PROJECTS_SCHEMA)
PROJECTS_SCHEMA_2_0["items"]["properties"][
    "domain"] = {"$ref": "#/definitions/domain"}

IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0['properties']['rules']['items']['properties'][
    'local']['items']['properties']['projects'] = PROJECTS_SCHEMA_2_0


def get_default_attribute_mapping_schema_version():
    return CONF.federation.attribute_mapping_default_schema_version
|
||||
|
||||
class DirectMaps(object):
|
||||
"""An abstraction around the remote matches.
|
||||
@ -272,7 +293,14 @@ class DirectMaps(object):
|
||||
|
||||
|
||||
def validate_mapping_structure(ref):
|
||||
v = jsonschema.Draft4Validator(MAPPING_SCHEMA)
|
||||
version = ref.get(
|
||||
'schema_version', get_default_attribute_mapping_schema_version())
|
||||
|
||||
LOG.debug("Validating mapping [%s] using validator from version [%s].",
|
||||
ref, version)
|
||||
|
||||
v = jsonschema.Draft4Validator(
|
||||
IDP_ATTRIBUTE_MAPPING_SCHEMAS[version]['schema'])
|
||||
|
||||
messages = ''
|
||||
for error in sorted(v.iter_errors(ref), key=str):
|
||||
@ -609,6 +637,19 @@ class RuleProcessor(object):
|
||||
group_names_list]
|
||||
return group_dicts
|
||||
|
||||
def normalize_user(self, user, default_mapping_domain):
|
||||
"""Parse and validate user mapping."""
|
||||
if user.get('type') is None:
|
||||
user['type'] = UserType.EPHEMERAL
|
||||
if user.get('type') not in (UserType.EPHEMERAL, UserType.LOCAL):
|
||||
msg = _("User type %s not supported") % user.get('type')
|
||||
raise exception.ValidationError(msg)
|
||||
|
||||
def extract_groups(self, groups_by_domain):
|
||||
for groups in list(groups_by_domain.values()):
|
||||
for group in list({g['name']: g for g in groups}.values()):
|
||||
yield group
|
||||
|
||||
def _transform(self, identity_values):
|
||||
"""Transform local mappings, to an easier to understand format.
|
||||
|
||||
@ -643,23 +684,6 @@ class RuleProcessor(object):
|
||||
:rtype: dict
|
||||
|
||||
"""
|
||||
def extract_groups(groups_by_domain):
|
||||
for groups in list(groups_by_domain.values()):
|
||||
for group in list({g['name']: g for g in groups}.values()):
|
||||
yield group
|
||||
|
||||
def normalize_user(user):
|
||||
"""Parse and validate user mapping."""
|
||||
user_type = user.get('type')
|
||||
|
||||
if user_type and user_type not in (UserType.EPHEMERAL,
|
||||
UserType.LOCAL):
|
||||
msg = _("User type %s not supported") % user_type
|
||||
raise exception.ValidationError(msg)
|
||||
|
||||
if user_type is None:
|
||||
user['type'] = UserType.EPHEMERAL
|
||||
|
||||
# initialize the group_ids as a set to eliminate duplicates
|
||||
user = {}
|
||||
group_ids = set()
|
||||
@ -683,18 +707,19 @@ class RuleProcessor(object):
|
||||
if 'user' in identity_value:
|
||||
# if a mapping outputs more than one user name, log it
|
||||
if user:
|
||||
LOG.warning('Ignoring user name')
|
||||
LOG.warning('Ignoring user [%s]',
|
||||
identity_value.get('user'))
|
||||
else:
|
||||
user = identity_value.get('user')
|
||||
|
||||
if 'group' in identity_value:
|
||||
group = identity_value['group']
|
||||
if 'id' in group:
|
||||
group_ids.add(group['id'])
|
||||
elif 'name' in group:
|
||||
domain = (group['domain'].get('name') or
|
||||
group['domain'].get('id'))
|
||||
groups_by_domain.setdefault(domain, list()).append(group)
|
||||
group_names.extend(extract_groups(groups_by_domain))
|
||||
groups = self.process_group_by_name(
|
||||
group, groups_by_domain)
|
||||
group_names.extend(groups)
|
||||
if 'groups' in identity_value:
|
||||
group_dicts = self._normalize_groups(identity_value)
|
||||
group_names.extend(group_dicts)
|
||||
@ -706,16 +731,25 @@ class RuleProcessor(object):
|
||||
# representation of a list.
|
||||
group_ids.update(
|
||||
self._ast_literal_eval(identity_value['group_ids']))
|
||||
if 'projects' in identity_value:
|
||||
projects = identity_value['projects']
|
||||
|
||||
normalize_user(user)
|
||||
if 'projects' in identity_value:
|
||||
projects = self.extract_projects(identity_value)
|
||||
|
||||
self.normalize_user(user, identity_value.get('domain'))
|
||||
|
||||
return {'user': user,
|
||||
'group_ids': list(group_ids),
|
||||
'group_names': group_names,
|
||||
'projects': projects}
|
||||
|
||||
def process_group_by_name(self, group, groups_by_domain):
|
||||
domain = (group['domain'].get('name') or group['domain'].get('id'))
|
||||
groups_by_domain.setdefault(domain, list()).append(group)
|
||||
return self.extract_groups(groups_by_domain)
|
||||
|
||||
def extract_projects(self, identity_value):
|
||||
return identity_value.get('projects', [])
|
||||
|
||||
def _update_local_mapping(self, local, direct_maps):
|
||||
"""Replace any {0}, {1} ... values with data from the assertion.
|
||||
|
||||
@ -944,3 +978,56 @@ def assert_enabled_service_provider_object(service_provider):
|
||||
tr_msg = _('Service Provider %(sp)s is disabled') % {'sp': sp_id}
|
||||
LOG.debug(msg)
|
||||
raise exception.Forbidden(tr_msg)
|
||||
|
||||
|
||||
class RuleProcessorToHonorDomainOption(RuleProcessor):
    """Handle the default domain configured in the attribute mapping.

    This rule processor is designed to handle the `domain` attribute
    configured at the root of the attribute mapping. When this attribute is
    configured, we take it as the default domain for the attribute mapping,
    instead of the domain of the IdP. Moreover, we respect the overrides
    that can take place in the `groups`, `user`, and `projects` attribute
    definitions.
    """

    def __init__(self, mapping_id, rules):
        super(RuleProcessorToHonorDomainOption, self).__init__(
            mapping_id, rules)

    def extract_projects(self, identity_value):
        projects = identity_value.get("projects", [])
        default_mapping_domain = identity_value.get("domain")
        for project in projects:
            if not project.get("domain"):
                LOG.debug("Configuring the domain [%s] for project [%s].",
                          default_mapping_domain, project)
                project["domain"] = default_mapping_domain
        return projects

    def normalize_user(self, user, default_mapping_domain):
        super(RuleProcessorToHonorDomainOption, self).normalize_user(
            user, default_mapping_domain)
        if not user.get("domain"):
            LOG.debug("Configuring the domain [%s] for user [%s].",
                      default_mapping_domain, user)
            user["domain"] = default_mapping_domain
        else:
            LOG.debug("The user [%s] already has a domain configured; "
                      "no default is needed.", user)


IDP_ATTRIBUTE_MAPPING_SCHEMAS = {
    "1.0": {"schema": IDP_ATTRIBUTE_MAPPING_SCHEMA_1_0,
            "processor": RuleProcessor},
    "2.0": {"schema": IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0,
            "processor": RuleProcessorToHonorDomainOption}
}


def create_attribute_mapping_rules_processor(mapping):
    version = mapping.get(
        'schema_version', get_default_attribute_mapping_schema_version())

    return IDP_ATTRIBUTE_MAPPING_SCHEMAS[version]['processor'](
        mapping['id'], mapping['rules'])

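A rough usage sketch of the new factory (all values are placeholders): a mapping stored with `schema_version` '2.0' is handed to the domain-aware processor, while a mapping without a version falls back to the configured default.

```python
# Illustrative only: the mapping dict mirrors what the federation driver
# stores (id, rules, schema_version).
mapping = {
    'id': 'example-mapping',   # placeholder id
    'schema_version': '2.0',
    'rules': [],               # rules elided for brevity
}
processor = create_attribute_mapping_rules_processor(mapping)
# -> RuleProcessorToHonorDomainOption instance

legacy = {'id': 'legacy-mapping', 'rules': []}
# Without schema_version, the default from
# [federation]/attribute_mapping_default_schema_version is used.
processor = create_attribute_mapping_rules_processor(legacy)
```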
@ -1208,18 +1208,12 @@ class Manager(manager.Manager):
|
||||
|
||||
hints = driver_hints.Hints()
|
||||
hints.add_filter('user_id', user_id)
|
||||
fed_users = PROVIDERS.shadow_users_api.list_federated_users_info(hints)
|
||||
|
||||
driver.delete_user(entity_id)
|
||||
PROVIDERS.assignment_api.delete_user_assignments(user_id)
|
||||
self.get_user.invalidate(self, user_id)
|
||||
self.get_user_by_name.invalidate(self, user_old['name'],
|
||||
user_old['domain_id'])
|
||||
for fed_user in fed_users:
|
||||
self._shadow_federated_user.invalidate(
|
||||
self, fed_user['idp_id'], fed_user['protocol_id'],
|
||||
fed_user['unique_id'], fed_user['display_name'],
|
||||
user_old.get('extra', {}).get('email'))
|
||||
|
||||
PROVIDERS.credential_api.delete_credentials_for_user(user_id)
|
||||
PROVIDERS.id_mapping_api.delete_id_mapping(user_id)
|
||||
@ -1505,54 +1499,59 @@ class Manager(manager.Manager):
|
||||
except exception.UserNotFound:
|
||||
return PROVIDERS.shadow_users_api.create_nonlocal_user(user)
|
||||
|
||||
@MEMOIZE
|
||||
def _shadow_federated_user(self, idp_id, protocol_id, unique_id,
|
||||
display_name, email=None):
|
||||
def _shadow_federated_user(self, idp_id, protocol_id, user):
|
||||
user_dict = {}
|
||||
email = user.get('email')
|
||||
try:
|
||||
LOG.debug("Trying to update name for federated user [%s].", user)
|
||||
|
||||
PROVIDERS.shadow_users_api.update_federated_user_display_name(
|
||||
idp_id, protocol_id, unique_id, display_name)
|
||||
idp_id, protocol_id, user['id'], user['name'])
|
||||
user_dict = PROVIDERS.shadow_users_api.get_federated_user(
|
||||
idp_id, protocol_id, unique_id)
|
||||
idp_id, protocol_id, user['id'])
|
||||
|
||||
if email:
|
||||
LOG.debug("Executing the e-mail update for federated user "
|
||||
"[%s].", user)
|
||||
|
||||
user_ref = {"email": email}
|
||||
self.update_user(user_dict['id'], user_ref)
|
||||
user_dict.update({"email": email})
|
||||
except exception.UserNotFound:
|
||||
idp = PROVIDERS.federation_api.get_idp(idp_id)
|
||||
federated_dict = {
|
||||
'idp_id': idp_id,
|
||||
'protocol_id': protocol_id,
|
||||
'unique_id': unique_id,
|
||||
'display_name': display_name
|
||||
'unique_id': user['id'],
|
||||
'display_name': user['name']
|
||||
}
|
||||
LOG.debug("Creating federated user [%s].", user)
|
||||
user_dict = (
|
||||
PROVIDERS.shadow_users_api.create_federated_user(
|
||||
idp['domain_id'], federated_dict, email=email
|
||||
user["domain"]['id'],
|
||||
federated_dict, email=email
|
||||
)
|
||||
)
|
||||
PROVIDERS.shadow_users_api.set_last_active_at(user_dict['id'])
|
||||
return user_dict
|
||||
|
||||
def shadow_federated_user(self, idp_id, protocol_id, unique_id,
|
||||
display_name, email=None, group_ids=None):
|
||||
def shadow_federated_user(self, idp_id, protocol_id, user, group_ids=None):
|
||||
"""Map a federated user to a user.
|
||||
|
||||
:param idp_id: identity provider id
|
||||
:param protocol_id: protocol id
|
||||
:param unique_id: unique id for the user within the IdP
|
||||
:param display_name: user's display name
|
||||
:param email: user's email
|
||||
:param user: User dictionary
|
||||
:param group_ids: list of group ids to add the user to
|
||||
|
||||
:returns: dictionary of the mapped User entity
|
||||
"""
|
||||
user_dict = self._shadow_federated_user(
|
||||
idp_id, protocol_id, unique_id, display_name, email)
|
||||
user_dict = self._shadow_federated_user(idp_id, protocol_id, user)
|
||||
|
||||
# Note(knikolla): The shadowing operation can be cached,
|
||||
# however we need to update the expiring group memberships.
|
||||
if group_ids:
|
||||
for group_id in group_ids:
|
||||
LOG.info("Adding user [%s] to group [%s].",
|
||||
user_dict, group_id)
|
||||
PROVIDERS.shadow_users_api.add_user_to_group_expires(
|
||||
user_dict['id'], group_id)
|
||||
return user_dict
|
||||
|
keystone/tests/unit/auth/plugins/test_mapped.py (new file, 152 lines)
@@ -0,0 +1,152 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import uuid
|
||||
|
||||
from unittest import mock
|
||||
|
||||
from keystone.assignment.core import Manager as AssignmentApi
|
||||
from keystone.auth.plugins import mapped
|
||||
from keystone.exception import ProjectNotFound
|
||||
from keystone.resource.core import Manager as ResourceApi
|
||||
from keystone.tests import unit
|
||||
|
||||
|
||||
class TestMappedPlugin(unit.TestCase):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(TestMappedPlugin, self).__init__(*args, **kwargs)
|
||||
|
||||
def setUp(self):
|
||||
super(TestMappedPlugin, self).setUp()
|
||||
self.resource_api_mock = mock.Mock(spec=ResourceApi)
|
||||
self.assignment_api_mock = mock.Mock(spec=AssignmentApi)
|
||||
self.domain_uuid_mock = uuid.uuid4().hex
|
||||
self.domain_mock = {'id': self.domain_uuid_mock}
|
||||
self.idp_domain_uuid_mock = uuid.uuid4().hex
|
||||
self.member_role_id = uuid.uuid4().hex
|
||||
self.member_role_name = "member"
|
||||
self.existing_roles = {
|
||||
self.member_role_name: {'id': self.member_role_id}}
|
||||
self.shadow_project_mock = {'name': "test-project",
|
||||
'roles': [{'name': self.member_role_name}]}
|
||||
self.shadow_project_in_domain_mock = {
|
||||
'name': "test-project-in-domain", 'domain': self.domain_mock,
|
||||
'roles': [{'name': self.member_role_name}]}
|
||||
self.shadow_projects_mock = [self.shadow_project_mock,
|
||||
self.shadow_project_in_domain_mock]
|
||||
self.user_mock = {'id': uuid.uuid4().hex,
|
||||
'name': "test-user"}
|
||||
|
||||
def test_configure_project_domain_no_project_domain(self):
|
||||
mapped.configure_project_domain(self.shadow_project_mock,
|
||||
self.idp_domain_uuid_mock,
|
||||
self.resource_api_mock)
|
||||
self.assertIn("domain", self.shadow_project_mock)
|
||||
self.assertEqual(self.idp_domain_uuid_mock,
|
||||
self.shadow_project_mock['domain']['id'])
|
||||
|
||||
def test_configure_project_domain_with_domain_id(self):
|
||||
self.shadow_project_mock['domain'] = self.domain_mock
|
||||
mapped.configure_project_domain(self.shadow_project_mock,
|
||||
self.idp_domain_uuid_mock,
|
||||
self.resource_api_mock)
|
||||
self.assertIn("domain", self.shadow_project_mock)
|
||||
self.assertEqual(self.domain_uuid_mock,
|
||||
self.shadow_project_mock['domain']['id'])
|
||||
|
||||
def test_configure_project_domain_with_domain_name(self):
|
||||
domain_name = "test-domain"
|
||||
self.shadow_project_mock['domain'] = {'name': domain_name}
|
||||
self.resource_api_mock.get_domain_by_name.return_value =\
|
||||
self.domain_mock
|
||||
mapped.configure_project_domain(self.shadow_project_mock,
|
||||
self.idp_domain_uuid_mock,
|
||||
self.resource_api_mock)
|
||||
self.assertIn("domain", self.shadow_project_mock)
|
||||
self.assertEqual(self.domain_uuid_mock,
|
||||
self.shadow_project_mock['domain']['id'])
|
||||
self.resource_api_mock.get_domain_by_name.assert_called_with(
|
||||
domain_name)
|
||||
|
||||
def test_handle_projects_from_mapping_project_exists(self):
|
||||
project_mock_1 = self.create_project_mock_for_shadow_project(
|
||||
self.shadow_project_mock)
|
||||
project_mock_2 = self.create_project_mock_for_shadow_project(
|
||||
self.shadow_project_in_domain_mock)
|
||||
self.resource_api_mock.get_project_by_name.side_effect = [
|
||||
project_mock_1, project_mock_2]
|
||||
mapped.handle_projects_from_mapping(self.shadow_projects_mock,
|
||||
self.idp_domain_uuid_mock,
|
||||
self.existing_roles,
|
||||
self.user_mock,
|
||||
self.assignment_api_mock,
|
||||
self.resource_api_mock
|
||||
)
|
||||
self.resource_api_mock.get_project_by_name.assert_has_calls([
|
||||
mock.call(self.shadow_project_in_domain_mock['name'],
|
||||
self.shadow_project_in_domain_mock['domain']['id']),
|
||||
mock.call(self.shadow_project_mock['name'],
|
||||
self.idp_domain_uuid_mock)], any_order=True)
|
||||
self.assignment_api_mock.create_grant.assert_has_calls([
|
||||
mock.call(self.member_role_id, user_id=self.user_mock['id'],
|
||||
project_id=project_mock_1['id']),
|
||||
mock.call(self.member_role_id, user_id=self.user_mock['id'],
|
||||
project_id=project_mock_2['id'])
|
||||
])
|
||||
|
||||
@mock.patch("uuid.UUID.hex", new_callable=mock.PropertyMock)
|
||||
def test_handle_projects_from_mapping_create_projects(self, uuid_mock):
|
||||
uuid_mock.return_value = "uuid"
|
||||
project_mock_1 = self.create_project_mock_for_shadow_project(
|
||||
self.shadow_project_mock)
|
||||
project_mock_2 = self.create_project_mock_for_shadow_project(
|
||||
self.shadow_project_in_domain_mock)
|
||||
self.resource_api_mock.get_project_by_name.side_effect = [
|
||||
ProjectNotFound(project_id=project_mock_1['name']),
|
||||
ProjectNotFound(project_id=project_mock_2['name'])]
|
||||
self.resource_api_mock.create_project.side_effect = [
|
||||
project_mock_1, project_mock_2]
|
||||
mapped.handle_projects_from_mapping(self.shadow_projects_mock,
|
||||
self.idp_domain_uuid_mock,
|
||||
self.existing_roles,
|
||||
self.user_mock,
|
||||
self.assignment_api_mock,
|
||||
self.resource_api_mock
|
||||
)
|
||||
self.resource_api_mock.get_project_by_name.assert_has_calls([
|
||||
mock.call(self.shadow_project_in_domain_mock['name'],
|
||||
self.shadow_project_in_domain_mock['domain']['id']),
|
||||
mock.call(self.shadow_project_mock['name'],
|
||||
self.idp_domain_uuid_mock)], any_order=True)
|
||||
expected_project_ref1 = {
|
||||
'id': "uuid",
|
||||
'name': self.shadow_project_mock['name'],
|
||||
'domain_id': self.idp_domain_uuid_mock
|
||||
}
|
||||
expected_project_ref2 = {
|
||||
'id': "uuid",
|
||||
'name': self.shadow_project_in_domain_mock['name'],
|
||||
'domain_id': self.shadow_project_in_domain_mock['domain']['id']
|
||||
}
|
||||
self.resource_api_mock.create_project.assert_has_calls([
|
||||
mock.call(expected_project_ref1['id'], expected_project_ref1),
|
||||
mock.call(expected_project_ref2['id'], expected_project_ref2)])
|
||||
self.assignment_api_mock.create_grant.assert_has_calls([
|
||||
mock.call(self.member_role_id, user_id=self.user_mock['id'],
|
||||
project_id=project_mock_1['id']),
|
||||
mock.call(self.member_role_id, user_id=self.user_mock['id'],
|
||||
project_id=project_mock_2['id'])
|
||||
])
|
||||
|
||||
def create_project_mock_for_shadow_project(self, shadow_project):
|
||||
project = shadow_project.copy()
|
||||
project['id'] = uuid.uuid4().hex
|
||||
return project
|
@ -23,6 +23,8 @@ from keystone.federation import utils as mapping_utils
|
||||
from keystone.tests import unit
|
||||
from keystone.tests.unit import mapping_fixtures
|
||||
|
||||
from unittest import mock
|
||||
|
||||
|
||||
CONF = keystone.conf.CONF
|
||||
FAKE_MAPPING_ID = uuid.uuid4().hex
|
||||
@ -553,10 +555,18 @@ class MappingRuleEngineTests(unit.BaseTestCase):
|
||||
self.assertIsNotNone(mapped_properties)
|
||||
self.assertValidMappedUserObject(mapped_properties)
|
||||
self.assertEqual('jsmith', mapped_properties['user']['name'])
|
||||
unique_id, display_name = mapped.get_user_unique_id_and_display_name(
|
||||
mapped_properties)
|
||||
self.assertEqual('jsmith', unique_id)
|
||||
self.assertEqual('jsmith', display_name)
|
||||
|
||||
resource_api_mock = mock.patch(
|
||||
'keystone.resource.core.DomainConfigManager')
|
||||
idp_domain_id = uuid.uuid4().hex
|
||||
mapped.validate_and_prepare_federated_user(mapped_properties,
|
||||
idp_domain_id,
|
||||
resource_api_mock)
|
||||
|
||||
self.assertEqual('jsmith', mapped_properties['user']['id'])
|
||||
self.assertEqual('jsmith', mapped_properties['user']['name'])
|
||||
self.assertEqual(idp_domain_id,
|
||||
mapped_properties['user']['domain']['id'])
|
||||
|
||||
def test_user_identifications_name_and_federated_domain(self):
|
||||
"""Test various mapping options and how users are identified.
|
||||
@ -576,10 +586,20 @@ class MappingRuleEngineTests(unit.BaseTestCase):
|
||||
mapped_properties = rp.process(assertion)
|
||||
self.assertIsNotNone(mapped_properties)
|
||||
self.assertValidMappedUserObject(mapped_properties)
|
||||
unique_id, display_name = mapped.get_user_unique_id_and_display_name(
|
||||
mapped_properties)
|
||||
self.assertEqual('tbo', display_name)
|
||||
self.assertEqual('abc123%40example.com', unique_id)
|
||||
|
||||
resource_api_mock = mock.patch(
|
||||
'keystone.resource.core.DomainConfigManager')
|
||||
idp_domain_id = uuid.uuid4().hex
|
||||
user_domain_id = mapped_properties['user']['domain']['id']
|
||||
mapped.validate_and_prepare_federated_user(mapped_properties,
|
||||
idp_domain_id,
|
||||
resource_api_mock)
|
||||
|
||||
self.assertEqual('tbo', mapped_properties['user']['name'])
|
||||
self.assertEqual('abc123%40example.com',
|
||||
mapped_properties['user']['id'])
|
||||
self.assertEqual(user_domain_id,
|
||||
mapped_properties['user']['domain']['id'])
|
||||
|
||||
def test_user_identification_id(self):
|
||||
"""Test various mapping options and how users are identified.
|
||||
@ -600,10 +620,17 @@ class MappingRuleEngineTests(unit.BaseTestCase):
|
||||
self.assertIsNotNone(mapped_properties)
|
||||
self.assertValidMappedUserObject(mapped_properties)
|
||||
with self.flask_app.test_request_context():
|
||||
unique_id, display_name = (
|
||||
mapped.get_user_unique_id_and_display_name(mapped_properties))
|
||||
self.assertEqual('bob', unique_id)
|
||||
self.assertEqual('bob', display_name)
|
||||
resource_api_mock = mock.patch(
|
||||
'keystone.resource.core.DomainConfigManager')
|
||||
idp_domain_id = uuid.uuid4().hex
|
||||
mapped.validate_and_prepare_federated_user(mapped_properties,
|
||||
idp_domain_id,
|
||||
resource_api_mock)
|
||||
|
||||
self.assertEqual('bob', mapped_properties['user']['name'])
|
||||
self.assertEqual('bob', mapped_properties['user']['id'])
|
||||
self.assertEqual(idp_domain_id,
|
||||
mapped_properties['user']['domain']['id'])
|
||||
|
||||
def test_get_user_unique_id_and_display_name(self):
|
||||
|
||||
@ -616,10 +643,17 @@ class MappingRuleEngineTests(unit.BaseTestCase):
|
||||
self.assertValidMappedUserObject(mapped_properties)
|
||||
with self.flask_app.test_request_context(
|
||||
environ_base={'REMOTE_USER': 'remote_user'}):
|
||||
unique_id, display_name = (
|
||||
mapped.get_user_unique_id_and_display_name(mapped_properties))
|
||||
self.assertEqual('bob', unique_id)
|
||||
self.assertEqual('remote_user', display_name)
|
||||
resource_api_mock = mock.patch(
|
||||
'keystone.resource.core.DomainConfigManager')
|
||||
idp_domain_id = uuid.uuid4().hex
|
||||
mapped.validate_and_prepare_federated_user(mapped_properties,
|
||||
idp_domain_id,
|
||||
resource_api_mock)
|
||||
|
||||
self.assertEqual('remote_user', mapped_properties['user']['name'])
|
||||
self.assertEqual('bob', mapped_properties['user']['id'])
|
||||
self.assertEqual(idp_domain_id,
|
||||
mapped_properties['user']['domain']['id'])
|
||||
|
||||
def test_user_identification_id_and_name(self):
|
||||
"""Test various mapping options and how users are identified.
|
||||
@ -650,11 +684,20 @@ class MappingRuleEngineTests(unit.BaseTestCase):
|
||||
mapped_properties = rp.process(assertion)
|
||||
self.assertIsNotNone(mapped_properties)
|
||||
self.assertValidMappedUserObject(mapped_properties)
|
||||
unique_id, display_name = (
|
||||
mapped.get_user_unique_id_and_display_name(mapped_properties)
|
||||
)
|
||||
self.assertEqual(exp_user_name, display_name)
|
||||
self.assertEqual('abc123%40example.com', unique_id)
|
||||
|
||||
resource_api_mock = mock.patch(
|
||||
'keystone.resource.core.DomainConfigManager')
|
||||
idp_domain_id = uuid.uuid4().hex
|
||||
user_domain_id = mapped_properties['user']['domain']['id']
|
||||
mapped.validate_and_prepare_federated_user(mapped_properties,
|
||||
idp_domain_id,
|
||||
resource_api_mock)
|
||||
|
||||
self.assertEqual(exp_user_name, mapped_properties['user']['name'])
|
||||
self.assertEqual('abc123%40example.com',
|
||||
mapped_properties['user']['id'])
|
||||
self.assertEqual(user_domain_id,
|
||||
mapped_properties['user']['domain']['id'])
|
||||
|
||||
def test_whitelist_pass_through(self):
|
||||
mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH
|
||||
|
keystone/tests/unit/federation/test_utils.py (new file, 250 lines)
@@ -0,0 +1,250 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import copy
|
||||
import uuid
|
||||
|
||||
from keystone.exception import ValidationError
|
||||
from keystone.federation import utils
|
||||
from keystone.tests import unit
|
||||
|
||||
|
||||
class TestFederationUtils(unit.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestFederationUtils, self).setUp()
|
||||
self.mapping_id_mock = uuid.uuid4().hex
|
||||
self.domain_id_mock = uuid.uuid4().hex
|
||||
self.domain_mock = {'id': self.domain_id_mock}
|
||||
self.attribute_mapping_schema_1_0 = {
|
||||
"id": self.mapping_id_mock,
|
||||
"schema_version": '1.0',
|
||||
"rules": [
|
||||
{
|
||||
"remote": [
|
||||
{
|
||||
"type": "OIDC-preferred_username"
|
||||
},
|
||||
{
|
||||
"type": "OIDC-email"
|
||||
},
|
||||
{
|
||||
"type": "OIDC-openstack-user-domain"
|
||||
},
|
||||
{
|
||||
"type": "OIDC-openstack-default-project"
|
||||
},
|
||||
{
|
||||
"type": "OIDC-openstack-user-status",
|
||||
"any_one_of": [
|
||||
"local"
|
||||
]
|
||||
},
|
||||
],
|
||||
"local": [
|
||||
{
|
||||
"domain": {
|
||||
"name": "{2}"
|
||||
},
|
||||
"user": {
|
||||
"domain": {
|
||||
"name": "{2}"
|
||||
},
|
||||
"type": "local",
|
||||
"name": "{0}",
|
||||
"email": "{1}"
|
||||
},
|
||||
"projects": [
|
||||
{
|
||||
"name": "{3}",
|
||||
"roles": [
|
||||
{
|
||||
"name": "member"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}]
|
||||
}
|
||||
self.attribute_mapping_schema_2_0 = copy.deepcopy(
|
||||
self.attribute_mapping_schema_1_0)
|
||||
self.attribute_mapping_schema_2_0['schema_version'] = '2.0'
|
||||
self.attribute_mapping_schema_2_0['rules'][0]['local'][0]["projects"][
|
||||
0]['domain'] = {"name": "{some_place_holder}"}
|
||||
self.rule_processor = utils.RuleProcessor(
|
||||
self.mapping_id_mock, self.attribute_mapping_schema_1_0)
|
||||
self.rule_processor_schema_2_0 =\
|
||||
utils.RuleProcessorToHonorDomainOption(
|
||||
self.mapping_id_mock, self.attribute_mapping_schema_2_0)
|
||||
|
||||
def test_validate_mapping_structure_schema1_0(self):
|
||||
utils.validate_mapping_structure(self.attribute_mapping_schema_1_0)
|
||||
|
||||
def test_validate_mapping_structure_schema2_0(self):
|
||||
utils.validate_mapping_structure(self.attribute_mapping_schema_2_0)
|
||||
|
||||
def test_normalize_user_no_type_set(self):
|
||||
user = {}
|
||||
self.rule_processor.normalize_user(user, self.domain_mock)
|
||||
self.assertEqual(utils.UserType.EPHEMERAL, user['type'])
|
||||
|
||||
def test_normalize_user_unexpected_type(self):
|
||||
user = {'type': "weird-type"}
|
||||
self.assertRaises(ValidationError, self.rule_processor.normalize_user,
|
||||
user, self.domain_mock)
|
||||
|
||||
def test_normalize_user_type_local(self):
|
||||
user = {'type': utils.UserType.LOCAL}
|
||||
self.rule_processor.normalize_user(user, self.domain_mock)
|
||||
self.assertEqual(utils.UserType.LOCAL, user['type'])
|
||||
|
||||
def test_normalize_user_type_ephemeral(self):
|
||||
user = {'type': utils.UserType.EPHEMERAL}
|
||||
self.rule_processor.normalize_user(user, self.domain_mock)
|
||||
self.assertEqual(utils.UserType.EPHEMERAL, user['type'])
|
||||
|
||||
def test_extract_groups(self):
|
||||
group1 = {'name': "group1", 'domain': self.domain_id_mock}
|
||||
group_by_domain = {self.domain_id_mock: [group1]}
|
||||
|
||||
result = utils.RuleProcessor(
|
||||
self.mapping_id_mock,
|
||||
self.attribute_mapping_schema_1_0).extract_groups(group_by_domain)
|
||||
|
||||
self.assertEqual([group1], list(result))
|
||||
|
||||
def test_process_group_by_name_domain_with_name_only(self):
|
||||
domain = {'name': "domain1"}
|
||||
group1 = {'name': "group1", 'domain': domain}
|
||||
group_by_domain = {}
|
||||
result = self.rule_processor.process_group_by_name(
|
||||
group1, group_by_domain)
|
||||
self.assertEqual([group1], list(result))
|
||||
self.assertEqual([domain["name"]], list(group_by_domain.keys()))
|
||||
|
||||
def test_process_group_by_name_domain_with_id_only(self):
|
||||
group1 = {'name': "group1", 'domain': self.domain_mock}
|
||||
group_by_domain = {}
|
||||
result = self.rule_processor.process_group_by_name(
|
||||
group1, group_by_domain)
|
||||
self.assertEqual([group1], list(result))
|
||||
self.assertEqual([self.domain_id_mock], list(group_by_domain.keys()))
|
||||
|
||||
def test_process_group_by_name_domain_with_id_and_name(self):
|
||||
self.domain_mock['name'] = "domain1"
|
||||
group1 = {'name': "group1", 'domain': self.domain_mock}
|
||||
group_by_domain = {}
|
||||
result = self.rule_processor.process_group_by_name(
|
||||
group1, group_by_domain)
|
||||
self.assertEqual([group1], list(result))
|
||||
self.assertEqual(["domain1"], list(group_by_domain.keys()))
|
||||
|
||||
def test_process_group_by_name_groups_same_domain(self):
|
||||
group1 = {'name': "group1", 'domain': self.domain_mock}
|
||||
group2 = {'name': "group2", 'domain': self.domain_mock}
|
||||
group_by_domain = {self.domain_id_mock: [group1]}
|
||||
result = self.rule_processor.process_group_by_name(
|
||||
group2, group_by_domain)
|
||||
self.assertEqual([group1, group2], list(result))
|
||||
self.assertEqual([self.domain_id_mock], list(group_by_domain.keys()))
|
||||
|
||||
def test_process_group_by_name_groups_different_domain(self):
|
||||
domain = {'name': "domain1"}
|
||||
group1 = {'name': "group1", 'domain': domain}
|
||||
group2 = {'name': "group2", 'domain': self.domain_mock}
|
||||
group_by_domain = {"domain1": [group1]}
|
||||
result = self.rule_processor.process_group_by_name(
|
||||
group2, group_by_domain)
|
||||
self.assertEqual([group1, group2], list(result))
|
||||
self.assertEqual(["domain1", self.domain_id_mock],
|
||||
list(group_by_domain.keys()))
|
||||
|
||||
def test_rule_processor_extract_projects_schema1_0_no_projects(self):
|
||||
result = self.rule_processor.extract_projects({})
|
||||
self.assertEqual([], result)
|
||||
|
||||
def test_rule_processor_extract_projects_schema1_0(self):
|
||||
projects_list = [{'name': "project1", 'domain': self.domain_mock}]
|
||||
identity_values = {'projects': projects_list}
|
||||
result = self.rule_processor.extract_projects(identity_values)
|
||||
self.assertEqual(projects_list, result)
|
||||
|
||||
def test_rule_processor_extract_projects_schema2_0_no_projects(self):
|
||||
result = self.rule_processor_schema_2_0.extract_projects({})
|
||||
self.assertEqual([], result)
|
||||
|
||||
def test_rule_processor_extract_projects_schema2_0_domain_in_project(self):
|
||||
projects_list = [{'name': "project1", 'domain': self.domain_mock}]
|
||||
identity_values = {'projects': projects_list}
|
||||
result = self.rule_processor_schema_2_0.extract_projects(
|
||||
identity_values)
|
||||
self.assertEqual(projects_list, result)
|
||||
|
||||
def test_rule_processor_extract_projects_schema2_0_no_domain(self):
|
||||
projects_list = [{'name': "project1"}]
|
||||
identity_values = {'projects': projects_list}
|
||||
result = self.rule_processor_schema_2_0.extract_projects(
|
||||
identity_values)
|
||||
self.assertEqual(projects_list, result)
|
||||
|
||||
def test_rule_processor_extract_projects_schema2_0_no_domain_project(self):
|
||||
project = {'name': "project1"}
|
||||
identity_values = {'projects': [project.copy()],
|
||||
'domain': self.domain_mock}
|
||||
result = self.rule_processor_schema_2_0.extract_projects(
|
||||
identity_values)
|
||||
expected_project = project.copy()
|
||||
expected_project['domain'] = self.domain_mock
|
||||
self.assertEqual([expected_project], result)
|
||||
|
||||
def test_normalize_user_no_type_set_schema_2_0(self):
|
||||
user = {}
|
||||
self.rule_processor_schema_2_0.normalize_user(user, self.domain_mock)
|
||||
self.assertEqual(utils.UserType.EPHEMERAL, user['type'])
|
||||
|
||||
def test_normalize_user_unexpected_type_schema_2_0(self):
|
||||
user = {'type': "weird-type"}
|
||||
self.assertRaises(ValidationError,
|
||||
self.rule_processor_schema_2_0.normalize_user, user,
|
||||
self.domain_mock)
|
||||
|
||||
def test_normalize_user_type_local_schema_2_0(self):
|
||||
user = {'type': utils.UserType.LOCAL}
|
||||
self.rule_processor_schema_2_0.normalize_user(user, self.domain_mock)
|
||||
self.assertEqual(utils.UserType.LOCAL, user['type'])
|
||||
|
||||
def test_normalize_user_type_ephemeral_schema_2_0(self):
|
||||
user = {'type': utils.UserType.EPHEMERAL}
|
||||
self.rule_processor_schema_2_0.normalize_user(user, self.domain_mock)
|
||||
self.assertEqual(utils.UserType.EPHEMERAL, user['type'])
|
||||
|
||||
def test_normalize_user_no_domain_schema_2_0(self):
|
||||
user = {}
|
||||
self.rule_processor_schema_2_0.normalize_user(user, self.domain_mock)
|
||||
self.assertEqual(utils.UserType.EPHEMERAL, user['type'])
|
||||
self.assertEqual(self.domain_mock, user.get("domain"))
|
||||
|
||||
def test_create_attribute_mapping_rules_processor_default(self):
|
||||
result = utils.create_attribute_mapping_rules_processor(
|
||||
self.attribute_mapping_schema_1_0)
|
||||
self.assertIsInstance(result, utils.RuleProcessor)
|
||||
|
||||
def test_create_attribute_mapping_rules_processor_schema1_0(self):
|
||||
result = utils.create_attribute_mapping_rules_processor(
|
||||
self.attribute_mapping_schema_1_0)
|
||||
self.assertIsInstance(result, utils.RuleProcessor)
|
||||
|
||||
def test_create_attribute_mapping_rules_processor_schema2_0(self):
|
||||
result = utils.create_attribute_mapping_rules_processor(
|
||||
self.attribute_mapping_schema_2_0)
|
||||
self.assertIsInstance(result, utils.RuleProcessorToHonorDomainOption)
|
@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.

import copy
import uuid

from keystone.common import driver_hints
@ -20,17 +21,21 @@ PROVIDERS = provider_api.ProviderAPIs

class ShadowUsersCoreTests(object):
    def test_shadow_federated_user(self):
        federated_user1 = copy.deepcopy(self.federated_user)
        ShadowUsersCoreTests.normalize_federated_user_properties_for_test(
            federated_user1, email=self.email
        )

        user = PROVIDERS.identity_api.shadow_federated_user(
            self.federated_user['idp_id'],
            self.federated_user['protocol_id'],
            self.federated_user['unique_id'],
            self.federated_user['display_name'],
            self.email)
            self.federated_user['idp_id'], self.federated_user['protocol_id'],
            federated_user1)

        self.assertIsNotNone(user['id'])
        self.assertEqual(7, len(user.keys()))
        self.assertIsNotNone(user['name'])
        self.assertIsNone(user['password_expires_at'])
        self.assertIsNotNone(user['domain_id'])

        # NOTE(breton): below, attribute `enabled` is explicitly tested to be
        # equal True. assertTrue should not be used, because it converts
        # the passed value to bool().
@ -38,58 +43,73 @@ class ShadowUsersCoreTests(object):
        self.assertIsNotNone(user['email'])

    def test_shadow_existing_federated_user(self):
        federated_user1 = copy.deepcopy(self.federated_user)
        ShadowUsersCoreTests.normalize_federated_user_properties_for_test(
            federated_user1, email=self.email
        )

        # introduce the user to keystone for the first time
        shadow_user1 = PROVIDERS.identity_api.shadow_federated_user(
            self.federated_user['idp_id'],
            self.federated_user['protocol_id'],
            self.federated_user['unique_id'],
            self.federated_user['display_name'])
        self.assertEqual(self.federated_user['display_name'],
                         shadow_user1['name'])
            self.federated_user['idp_id'], self.federated_user['protocol_id'],
            federated_user1)

        self.assertEqual(federated_user1['display_name'], shadow_user1['name'])

        # shadow the user again, with another name to invalidate the cache
        # internally, this operation causes a request to the driver. It should
        # not fail.
        self.federated_user['display_name'] = uuid.uuid4().hex
        federated_user2 = copy.deepcopy(self.federated_user)
        federated_user2['display_name'] = uuid.uuid4().hex
        ShadowUsersCoreTests.normalize_federated_user_properties_for_test(
            federated_user2, email=self.email
        )

        shadow_user2 = PROVIDERS.identity_api.shadow_federated_user(
            self.federated_user['idp_id'],
            self.federated_user['protocol_id'],
            self.federated_user['unique_id'],
            self.federated_user['display_name'])
        self.assertEqual(self.federated_user['display_name'],
                         shadow_user2['name'])
            self.federated_user['idp_id'], self.federated_user['protocol_id'],
            federated_user2)
        self.assertEqual(federated_user2['display_name'], shadow_user2['name'])
        self.assertNotEqual(shadow_user1['name'], shadow_user2['name'])

        # The shadowed users still share the same unique ID.
        self.assertEqual(shadow_user1['id'], shadow_user2['id'])

    def test_shadow_federated_user_not_creating_a_local_user(self):
        federated_user1 = copy.deepcopy(self.federated_user)
        ShadowUsersCoreTests.normalize_federated_user_properties_for_test(
            federated_user1, email="some_id@mail.provider"
        )

        PROVIDERS.identity_api.shadow_federated_user(
            self.federated_user['idp_id'],
            self.federated_user['protocol_id'],
            self.federated_user['unique_id'],
            self.federated_user['display_name'],
            "some_id@mail.provider")
            federated_user1['idp_id'], federated_user1['protocol_id'],
            federated_user1)

        hints = driver_hints.Hints()
        hints.add_filter('name', self.federated_user['display_name'])
        hints.add_filter('name', federated_user1['display_name'])
        users = PROVIDERS.identity_api.list_users(hints=hints)

        self.assertEqual(1, len(users))

        federated_user2 = copy.deepcopy(federated_user1)
        # Avoid caching
        self.federated_user['display_name'] = uuid.uuid4().hex
        federated_user2['name'] = uuid.uuid4().hex
        federated_user2['id'] = uuid.uuid4().hex
        federated_user2['email'] = "some_id_2@mail.provider"

        PROVIDERS.identity_api.shadow_federated_user(
            self.federated_user['idp_id'],
            self.federated_user['protocol_id'],
            self.federated_user['unique_id'],
            self.federated_user['display_name'],
            "some_id@mail.provider")
            federated_user2['idp_id'], federated_user2['protocol_id'],
            federated_user2)

        hints.add_filter('name', self.federated_user['display_name'])
        hints.add_filter('name', federated_user2['display_name'])
        users = PROVIDERS.identity_api.list_users(hints=hints)

        # The number of users must remain 1
        self.assertEqual(1, len(users))

    @staticmethod
    def normalize_federated_user_properties_for_test(federated_user,
                                                     email=None):
        federated_user['email'] = email
        federated_user['id'] = federated_user['unique_id']
        federated_user['name'] = federated_user['display_name']
        if not federated_user.get('domain'):
            federated_user['domain'] = {'id': uuid.uuid4().hex}

@ -714,6 +714,32 @@ MAPPING_EPHEMERAL_USER = {
    ]
}

MAPPING_EPHEMERAL_USER_REMOTE_DOMAIN = {
    "rules": [
        {
            "local": [
                {
                    "user": {
                        "name": "{0}",
                        "domain": {
                            "name": "{1}"
                        },
                        "type": "ephemeral"
                    }
                }
            ],
            "remote": [
                {
                    "type": "UserName"
                },
                {
                    "type": "OIDC-openstack-user-domain"
                },
            ]
        }
    ]
}

MAPPING_GROUPS_WHITELIST = {
    "rules": [
        {
@ -1749,6 +1775,11 @@ GROUPS_DOMAIN_ASSERTION = {
    'JSON:{"name":"group2","domain":{"name":"yyy"}}'
}

USER_WITH_DOMAIN_ASSERTION = {
    'UserName': 'marek',
    'OIDC-openstack-user-domain': 'user_domain'
}

MAPPING_UNICODE = {
    "rules": [
        {

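For orientation, feeding USER_WITH_DOMAIN_ASSERTION through MAPPING_EPHEMERAL_USER_REMOTE_DOMAIN should produce an ephemeral user named from the UserName attribute whose domain name comes from OIDC-openstack-user-domain. The snippet below only illustrates that {0}/{1} substitution with a hypothetical render_local_user helper; it is not the keystone mapping engine.

# Illustration only: apply the remote values to the local user template the
# way the fixtures above suggest. This is a stand-in, not keystone code.
def render_local_user(template, remote_values):
    return {
        'name': template['name'].format(*remote_values),
        'domain': {'name': template['domain']['name'].format(*remote_values)},
        'type': template['type'],
    }


remote_values = ['marek', 'user_domain']  # UserName, OIDC-openstack-user-domain
template = {'name': '{0}', 'domain': {'name': '{1}'}, 'type': 'ephemeral'}
assert render_local_user(template, remote_values) == {
    'name': 'marek',
    'domain': {'name': 'user_domain'},
    'type': 'ephemeral',
}
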
@ -41,7 +41,8 @@ class SqlFederation(test_backend_sql.SqlModels):

    def test_mapping(self):
        cols = (('id', sql.String, 64),
                ('rules', sql.JsonBlob, None))
                ('rules', sql.JsonBlob, None),
                ('schema_version', sql.String, 5))
        self.assertExpectedSchema('mapping', cols)

    def test_service_provider(self):

@ -680,15 +680,20 @@ class SqlIdentity(SqlTests,
        new_group = PROVIDERS.identity_api.create_group(new_group)

        fed_dict = unit.new_federated_user_ref()
        fed_dict['id'] = fed_dict['unique_id']
        fed_dict['name'] = fed_dict['display_name']
        fed_dict['domain'] = {'id': uuid.uuid4().hex}

        fed_dict['idp_id'] = 'myidp'
        fed_dict['protocol_id'] = 'mapped'

        with freezegun.freeze_time(time - tick) as frozen_time:
            user = PROVIDERS.identity_api.shadow_federated_user(
                **fed_dict, group_ids=[new_group['id']])
                fed_dict['idp_id'], fed_dict['protocol_id'],
                fed_dict, group_ids=[new_group['id']])

            PROVIDERS.identity_api.check_user_in_group(user['id'],
                                                       new_group['id'])
            PROVIDERS.identity_api.check_user_in_group(
                user['id'], new_group['id'])

            # Expiration
            frozen_time.tick(tick)
@ -699,7 +704,8 @@ class SqlIdentity(SqlTests,

            # Renewal
            PROVIDERS.identity_api.shadow_federated_user(
                **fed_dict, group_ids=[new_group['id']])
                fed_dict['idp_id'], fed_dict['protocol_id'], fed_dict,
                group_ids=[new_group['id']])
            PROVIDERS.identity_api.check_user_in_group(user['id'],
                                                       new_group['id'])

@ -1866,6 +1866,7 @@ class TestMappingEngineTester(unit.BaseTestCase):
            self.input = parent.command_input
            self.prefix = parent.command_prefix
            self.engine_debug = parent.command_engine_debug
            self.mapping_schema_version = parent.mapping_schema_version

    def setUp(self):
        # Set up preset cli options and a parser
@ -1885,6 +1886,8 @@ class TestMappingEngineTester(unit.BaseTestCase):
        subparsers = parser_test.add_subparsers()
        self.parser = cli.MappingEngineTester.add_argument_parser(subparsers)

        self.mapping_schema_version = '1.0'

    def config_files(self):
        config_files = super(TestMappingEngineTester, self).config_files()
        config_files.append(unit.dirs.tests_conf('backend_sql.conf'))

@ -31,6 +31,7 @@ class ConfigTestCase(unit.TestCase):
        sample_file = 'keystone.conf.sample'
        args = ['--namespace', 'keystone', '--output-file',
                unit.dirs.etc(sample_file)]

        generator.main(args=args)
        config_files.insert(0, unit.dirs.etc(sample_file))
        self.addCleanup(os.remove, unit.dirs.etc(sample_file))

@ -210,7 +210,8 @@ class AuthContextMiddlewareTest(test_backend_sql.SqlTests,
        mapped_rules = rules.get('rules', {})
        return {
            'id': uuid.uuid4().hex,
            'rules': mapped_rules
            'rules': mapped_rules,
            'schema_version': "1.0"
        }

    def _assert_tokenless_auth_context(self, context, ephemeral_user=False):

@ -221,16 +221,16 @@ class KeystoneMigrationsWalk(
            'duplicate_trust_constraint',
            {x['name'] for x in constraints},
        )
        self.assertNotIn(
            [
                'trustor_user_id',
                'trustee_user_id',
                'project_id',
                'impersonation',
                'expires_at',
            ],
            {x['column_names'] for x in constraints},
        )

        all_constraints = []
        for c in constraints:
            all_constraints += c.get('column_names', [])

        not_allowed_constraints = ['trustor_user_id', 'trustee_user_id',
                                   'project_id', 'impersonation', 'expires_at',
                                   ]
        for not_c in not_allowed_constraints:
            self.assertNotIn(not_c, all_constraints)

    def _check_b4f8b3f584e0(self, connection):
        inspector = sqlalchemy.inspect(connection)
@ -301,6 +301,26 @@ class KeystoneMigrationsWalk(
            found = True
        self.assertTrue(found, 'Failed to find column')

    def _pre_upgrade_47147121(self, connection):
        inspector = sqlalchemy.inspect(connection)
        columns = inspector.get_columns('mapping')

        all_column_names = []
        for c in columns:
            all_column_names.append(c.get('name'))

        self.assertNotIn('schema_version', all_column_names)

    def _check_47147121(self, connection):
        inspector = sqlalchemy.inspect(connection)
        columns = inspector.get_columns('mapping')

        all_column_names = []
        for c in columns:
            all_column_names.append(c.get('name'))

        self.assertIn('schema_version', all_column_names)

    def test_single_base_revision(self):
        """Ensure we only have a single base revision.

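_pre_upgrade_47147121 and _check_47147121 above only assert that the mapping table gains a schema_version column across that revision. As a rough sketch of what such an Alembic upgrade step could look like (the String(5) length and the '1.0' server default are assumptions taken from the schema test in this change, not the authoritative migration):

# Sketch of an Alembic upgrade adding mapping.schema_version; the real
# revision in the keystone tree is authoritative.
import sqlalchemy as sa
from alembic import op


def upgrade():
    op.add_column(
        'mapping',
        sa.Column('schema_version', sa.String(5), nullable=False,
                  server_default='1.0'),
    )
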
@ -142,7 +142,7 @@ INITIAL_TABLE_STRUCTURE = {
        'id', 'idp_id', 'mapping_id', 'remote_id_attribute',
    ],
    'mapping': [
        'id', 'rules',
        'id', 'rules', 'schema_version',
    ],
    'service_provider': [
        'auth_url', 'id', 'enabled', 'description', 'sp_url',

@ -179,7 +179,8 @@ class FederatedSetupMixin(object):
    def mapping_ref(self, rules=None):
        return {
            'id': uuid.uuid4().hex,
            'rules': rules or self.rules['rules']
            'rules': rules or self.rules['rules'],
            'schema_version': "1.0"
        }

    def _scope_request(self, unscoped_token_id, scope, scope_id):
@ -3036,6 +3037,27 @@ class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin):
            environment={uuid.uuid4().hex: self.REMOTE_IDS[0]}
        )

    def test_issue_token_for_ephemeral_user_with_remote_domain(self):
        """Test ephemeral user is created in the domain set by assertion.

        Shadow users may belong to the domain set by the assertion data.
        To verify that:
        - precreate the domain used later in the assertion
        - update the mapping to include the user domain name coming from
          the assertion
        - authenticate the user
        - verify the user domain is not the IdP domain

        """
        domain_ref = unit.new_domain_ref(name="user_domain")
        PROVIDERS.resource_api.create_domain(domain_ref["id"], domain_ref)

        PROVIDERS.federation_api.update_mapping(
            self.mapping["id"],
            mapping_fixtures.MAPPING_EPHEMERAL_USER_REMOTE_DOMAIN)
        r = self._issue_unscoped_token(assertion='USER_WITH_DOMAIN_ASSERTION')
        self.assertEqual(r.user_domain["id"], domain_ref["id"])
        self.assertNotEqual(r.user_domain["id"], self.idp["domain_id"])


class FernetFederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin):
    AUTH_METHOD = 'token'