diff --git a/keystone/tests/unit/token/test_fernet_provider.py b/keystone/tests/unit/token/test_fernet_provider.py
index 5dbc270571..dbf4c177be 100644
--- a/keystone/tests/unit/token/test_fernet_provider.py
+++ b/keystone/tests/unit/token/test_fernet_provider.py
@@ -49,10 +49,14 @@ class TestFernetTokenProvider(unit.TestCase):
         self.assertFalse(self.provider.needs_persistence())
 
     def test_invalid_v3_token_raises_token_not_found(self):
+        # NOTE(lbragstad): Here we use validate_non_persistent_token()
+        # because validate_v3_token() is strictly for validating UUID
+        # formatted tokens. It assumes cached tokens from a backend,
+        # whereas validate_non_persistent_token() does not.
         token_id = uuid.uuid4().hex
         e = self.assertRaises(
             exception.TokenNotFound,
-            self.provider.validate_v3_token,
+            self.provider.validate_non_persistent_token,
             token_id)
         self.assertIn(token_id, u'%s' % e)
 
diff --git a/keystone/token/provider.py b/keystone/token/provider.py
index 55c48b0db5..ca8441501d 100644
--- a/keystone/token/provider.py
+++ b/keystone/token/provider.py
@@ -253,19 +253,19 @@ class Manager(manager.Manager):
         if not token_id:
             raise exception.TokenNotFound(_('No token in the request'))
 
-        unique_id = utils.generate_unique_id(token_id)
         # NOTE(lbragstad): Only go to persistent storage if we have a token to
         # fetch from the backend (the driver persists the token). Otherwise
         # the information about the token must be in the token id.
         if not self._needs_persistence:
-            token_ref = token_id
+            token_ref = self.validate_non_persistent_token(token_id)
         else:
+            unique_id = utils.generate_unique_id(token_id)
             # NOTE(morganfainberg): Ensure we never use the long-form token_id
             # (PKI) as part of the cache_key.
             token_ref = self._persistence.get_token(unique_id)
-        token = self._validate_v3_token(token_ref)
-        self._is_valid_token(token)
-        return token
+        token_ref = self._validate_v3_token(token_ref)
+        self._is_valid_token(token_ref)
+        return token_ref
 
     @MEMOIZE
     def _validate_token(self, token_id):
@@ -273,7 +273,9 @@
             raise exception.TokenNotFound(_('No token in the request'))
 
         if not self._needs_persistence:
-            return self.driver.validate_v3_token(token_id)
+            # NOTE(lbragstad): This will validate v2 and v3 non-persistent
+            # tokens.
+            return self.driver.validate_non_persistent_token(token_id)
         token_ref = self._persistence.get_token(token_id)
         version = self.get_token_version(token_ref)
         if version == self.V3:
@@ -577,6 +579,17 @@
         """
         raise exception.NotImplemented()  # pragma: no cover
 
+    @abc.abstractmethod
+    def validate_non_persistent_token(self, token_id):
+        """Validate a given non-persistent token id and return the token_data.
+
+        :param token_id: the token id
+        :type token_id: string
+        :returns: token data
+        :raises keystone.exception.TokenNotFound: When the token is invalid
+        """
+        raise exception.NotImplemented()  # pragma: no cover
+
     @abc.abstractmethod
     def validate_v3_token(self, token_ref):
         """Validate the given V3 token and return the token_data.
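
Because validate_non_persistent_token() is added to the abstract Provider interface above, any out-of-tree token provider now has to supply it as well. Below is a minimal sketch of what such a stub could look like, assuming a hypothetical CustomProvider that only ever issues persistent (backend-stored) tokens; the class name and behaviour are illustrative only, and the remaining abstract methods are omitted for brevity.

    from keystone import exception
    from keystone.token import provider


    class CustomProvider(provider.Provider):
        # The other abstract methods (issue_v3_token, validate_v3_token,
        # etc.) still need real implementations; they are omitted here, so
        # this class cannot be instantiated as written.

        def validate_non_persistent_token(self, token_id):
            # A provider that only issues persistent tokens has nothing to
            # decode from the token id itself, so any self-contained token
            # id is simply reported as not found.
            raise exception.TokenNotFound(token_id=token_id)
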
diff --git a/keystone/token/providers/common.py b/keystone/token/providers/common.py
index 06f8f5ec61..df7d27c33f 100644
--- a/keystone/token/providers/common.py
+++ b/keystone/token/providers/common.py
@@ -678,6 +678,43 @@ class BaseProvider(provider.Provider):
             token_id = token_ref['token_data']['access']['token']['id']
             raise exception.TokenNotFound(token_id=token_id)
 
+    def validate_non_persistent_token(self, token_id):
+        try:
+            (user_id, methods, audit_ids, domain_id, project_id, trust_id,
+                federated_info, created_at, expires_at) = (
+                    self.token_formatter.validate_token(token_id))
+        except exception.ValidationError as e:
+            raise exception.TokenNotFound(e)
+
+        token_dict = None
+        trust_ref = None
+        if federated_info:
+            # NOTE(lbragstad): We need to rebuild information about the
+            # federated token as well as the federated token roles. This is
+            # because when we validate a non-persistent token, we don't have a
+            # token reference to pull the federated token information out of.
+            # As a result, we have to extract it from the token itself and
+            # rebuild the federated context. These private methods currently
+            # live in the keystone.token.providers.fernet.Provider() class.
+            token_dict = self._rebuild_federated_info(federated_info, user_id)
+            if project_id or domain_id:
+                self._rebuild_federated_token_roles(token_dict, federated_info,
+                                                    user_id, project_id,
+                                                    domain_id)
+        if trust_id:
+            trust_ref = self.trust_api.get_trust(trust_id)
+
+        return self.v3_token_data_helper.get_token_data(
+            user_id,
+            method_names=methods,
+            domain_id=domain_id,
+            project_id=project_id,
+            issued_at=created_at,
+            expires=expires_at,
+            trust=trust_ref,
+            token=token_dict,
+            audit_info=audit_ids)
+
     def validate_v3_token(self, token_ref):
         # FIXME(gyee): performance or correctness? Should we return the
         # cached token or reconstruct it? Obviously if we are going with
diff --git a/keystone/token/providers/fernet/core.py b/keystone/token/providers/fernet/core.py
index 631afae5a2..63cd4ca72d 100644
--- a/keystone/token/providers/fernet/core.py
+++ b/keystone/token/providers/fernet/core.py
@@ -230,45 +230,6 @@ class Provider(common.BaseProvider):
         token_data['access']['token']['id'] = token_ref
         return token_data
 
-    def validate_v3_token(self, token):
-        """Validate a V3 formatted token.
-
-        :param token: a string describing the token to validate
-        :returns: the token data
-        :raises keystone.exception.TokenNotFound: if token format version isn't
-            supported
-
-        """
-        try:
-            (user_id, methods, audit_ids, domain_id, project_id, trust_id,
-                federated_info, created_at, expires_at) = (
-                    self.token_formatter.validate_token(token))
-        except exception.ValidationError:
-            raise exception.TokenNotFound(token_id=token)
-
-        token_dict = None
-        if federated_info:
-            token_dict = self._rebuild_federated_info(federated_info, user_id)
-            if project_id or domain_id:
-                self._rebuild_federated_token_roles(token_dict, federated_info,
-                                                    user_id, project_id,
-                                                    domain_id)
-
-        trust_ref = None
-        if trust_id:
-            trust_ref = self.trust_api.get_trust(trust_id)
-
-        return self.v3_token_data_helper.get_token_data(
-            user_id,
-            method_names=methods,
-            domain_id=domain_id,
-            project_id=project_id,
-            issued_at=created_at,
-            expires=expires_at,
-            trust=trust_ref,
-            token=token_dict,
-            audit_info=audit_ids)
-
     def _get_token_id(self, token_data):
         """Generate the token_id based upon the data in token_data.
 
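
The try/except at the top of the new validate_non_persistent_token() above is an error-translation step: whatever the token formatter rejects with ValidationError (wrong key, bad padding, arbitrary garbage) is surfaced to callers as TokenNotFound, which the API reports as a missing token rather than a validation failure. A stripped-down sketch of the same pattern follows; validate_payload is a hypothetical stand-in for TokenFormatter.validate_token(), not a real keystone function.

    from keystone import exception


    def validate_payload(token_id):
        # Hypothetical stand-in for TokenFormatter.validate_token();
        # it rejects everything in order to exercise the error path.
        raise exception.ValidationError(
            'This is not a recognized Fernet token %s' % token_id)


    def validate_non_persistent_token(token_id):
        try:
            payload = validate_payload(token_id)
        except exception.ValidationError as e:
            # Anything the formatter cannot decode is reported as a
            # missing token, not as a malformed request.
            raise exception.TokenNotFound(e)
        return payload


    try:
        validate_non_persistent_token('gibberish')
    except exception.TokenNotFound as e:
        print(e)
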
diff --git a/keystone/token/providers/fernet/token_formatters.py b/keystone/token/providers/fernet/token_formatters.py
index 5db517a146..79d3c5d713 100644
--- a/keystone/token/providers/fernet/token_formatters.py
+++ b/keystone/token/providers/fernet/token_formatters.py
@@ -99,7 +99,7 @@
             return self.crypto.decrypt(token.encode('utf-8'))
         except fernet.InvalidToken:
             raise exception.ValidationError(
-                _('This is not a recognized Fernet token'))
+                _('This is not a recognized Fernet token %s') % token)
 
     @classmethod
     def restore_padding(cls, token):
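
Including the offending value in the message matters because fernet.InvalidToken itself carries no detail about what failed to decrypt. The underlying library behaviour can be reproduced with cryptography alone; this is a standalone sketch, independent of keystone, and the sample token bytes are arbitrary.

    from cryptography import fernet

    key = fernet.Fernet.generate_key()
    crypto = fernet.Fernet(key)

    try:
        # Anything not produced with this key, or not Fernet data at all,
        # fails the same way and with no further detail attached.
        crypto.decrypt(b'gAAAAABnot-a-real-token')
    except fernet.InvalidToken as exc:
        print('InvalidToken args: %r' % (exc.args,))  # usually empty
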