From 693a86f2a17c0d7871e5700257942badd8090533 Mon Sep 17 00:00:00 2001
From: Lance Bragstad
Date: Wed, 27 Jun 2018 14:10:57 +0000
Subject: [PATCH] Add serialization for TokenModel object

Future changes are going to rely on an instance of TokenModel
throughout the token provider API instead of a dictionary. We need to
serialize the new object so that we can cache it like we do with
dictionaries.

This change adds a handler that serializes instances of TokenModel
before putting them into the cache backend and reinflates them back
into TokenModel objects on the way out.

Partial-Bug: 1778945
Change-Id: I3d8def90b035616a21edfc0ed42f43fcbd76fe23
---
 keystone/exception.py                           | 10 +++
 keystone/models/token_model.py                  | 32 +++++++++
 .../unit/token/test_token_serialization.py      | 70 +++++++++++++++++++
 3 files changed, 112 insertions(+)
 create mode 100644 keystone/tests/unit/token/test_token_serialization.py

diff --git a/keystone/exception.py b/keystone/exception.py
index cac8ba7ffb..a392cfaa27 100644
--- a/keystone/exception.py
+++ b/keystone/exception.py
@@ -652,3 +652,13 @@ class LDAPSizeLimitExceeded(UnexpectedError):
     message_format = _('Number of User/Group entities returned by LDAP '
                        'exceeded size limit. Contact your LDAP '
                        'administrator.')
+
+
+class CacheDeserializationError(Exception):
+
+    def __init__(self, obj, data):
+        super(CacheDeserializationError, self).__init__(
+            _('Failed to deserialize %(obj)s. Data is %(data)s') % {
+                'obj': obj, 'data': data
+            }
+        )
diff --git a/keystone/models/token_model.py b/keystone/models/token_model.py
index f9769d2672..79882a1a5d 100644
--- a/keystone/models/token_model.py
+++ b/keystone/models/token_model.py
@@ -15,10 +15,12 @@
 import itertools
 
 from oslo_log import log
+from oslo_serialization import msgpackutils
 from oslo_utils import reflection
 from oslo_utils import timeutils
 import six
 
+from keystone.common import cache
 from keystone.common import provider_api
 import keystone.conf
 from keystone import exception
@@ -813,3 +815,33 @@ class TokenModel(object):
 
         self.id = token_id
         self.issued_at = issued_at
+
+
+class _TokenModelHandler(object):
+    identity = 126
+    handles = (TokenModel,)
+
+    def __init__(self, registry):
+        self._registry = registry
+
+    def serialize(self, obj):
+        serialized = msgpackutils.dumps(obj.__dict__, registry=self._registry)
+        return serialized
+
+    def deserialize(self, data):
+        token_data = msgpackutils.loads(data, registry=self._registry)
+        try:
+            token_model = TokenModel()
+            for k, v in iter(token_data.items()):
+                setattr(token_model, k, v)
+        except Exception:
+            LOG.debug(
+                "Failed to deserialize TokenModel. Data is %s", token_data
+            )
+            raise exception.CacheDeserializationError(
+                TokenModel.__name__, token_data
+            )
+        return token_model
+
+
+cache.register_model_handler(_TokenModelHandler)
diff --git a/keystone/tests/unit/token/test_token_serialization.py b/keystone/tests/unit/token/test_token_serialization.py
new file mode 100644
index 0000000000..99b2adff03
--- /dev/null
+++ b/keystone/tests/unit/token/test_token_serialization.py
@@ -0,0 +1,70 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import uuid
+
+import mock
+
+from keystone.common.cache import _context_cache
+from keystone.common import utils as ks_utils
+from keystone import exception
+from keystone.models import token_model
+from keystone.tests.unit import base_classes
+
+
+class TestTokenSerialization(base_classes.TestCaseWithBootstrap):
+
+    def setUp(self):
+        super(TestTokenSerialization, self).setUp()
+        self.admin_user_id = self.bootstrapper.admin_user_id
+        self.admin_username = self.bootstrapper.admin_username
+        self.admin_password = self.bootstrapper.admin_password
+        self.project_id = self.bootstrapper.project_id
+        self.project_name = self.bootstrapper.project_name
+        self.admin_role_id = self.bootstrapper.admin_role_id
+        self.member_role_id = self.bootstrapper.member_role_id
+        self.reader_role_id = self.bootstrapper.reader_role_id
+
+        self.token_id = uuid.uuid4().hex
+        issued_at = datetime.datetime.utcnow()
+        self.issued_at = ks_utils.isotime(at=issued_at, subsecond=True)
+
+        # Reach into the cache registry and pull out an instance of the
+        # _TokenModelHandler so that we can interact and test it directly (as
+        # opposed to using PROVIDERS or managers to invoke it).
+        token_handler_id = token_model._TokenModelHandler.identity
+        self.token_handler = _context_cache._registry.get(token_handler_id)
+
+        self.exp_token = token_model.TokenModel()
+        self.exp_token.user_id = self.admin_user_id
+        self.exp_token.project_id = self.project_id
+        self.exp_token.mint(self.token_id, self.issued_at)
+
+    def test_serialize_and_deserialize_token_model(self):
+        serialized = self.token_handler.serialize(self.exp_token)
+        token = self.token_handler.deserialize(serialized)
+
+        self.assertEqual(self.exp_token.user_id, token.user_id)
+        self.assertEqual(self.exp_token.project_id, token.project_id)
+        self.assertEqual(self.exp_token.id, token.id)
+        self.assertEqual(self.exp_token.issued_at, token.issued_at)
+
+    @mock.patch.object(
+        token_model.TokenModel, '__init__', side_effect=Exception)
+    def test_error_handling_in_deserialize(self, handler_mock):
+        serialized = self.token_handler.serialize(self.exp_token)
+        self.assertRaises(
+            exception.CacheDeserializationError,
+            self.token_handler.deserialize,
+            serialized
+        )