Implement Caching for Token Revocation List

Building on the Keystone caching implementation (backed by dogpile.cache),
this patchset adds caching for the token revocation list.

The following methods are cached:
    * token_api.list_revoked_tokens

Calls to token_api.delete_token and token_api.delete_tokens will
properly invalidate the cache for the revocation list.

Reworked some of the caching tests to allow for more in-depth
tests of the cache layer.

DocImpact

partial-blueprint: caching-layer-for-driver-calls
Change-Id: I2bc821fa68035884dfb885b17c051f3023e7a9f6
Morgan Fainberg 2013-08-23 19:06:42 -07:00
parent 369be47fdb
commit 9400658a1c
11 changed files with 110 additions and 35 deletions


@ -250,6 +250,13 @@ behavior is that subsystem caching is enabled, but the global toggle is set to d
leave this set to True. If the cache backend provides a key-mangler, this
option has no effect.
Current keystone systems that have caching capabilities:
* ``token``
The Token Revocation List cache time is handled by the configuration option
``revocation_cache_time`` in the ``[token]`` section. The revocation
list is refreshed whenever a token is revoked, and typically sees significantly
more requests than the validation of any individual token.
For more information about the different backends (and configuration options):
* `dogpile.cache.backends.memory`_
* `dogpile.cache.backends.memcached`_
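
As an illustration of the options described above, an operator's keystone.conf
might enable revocation-list caching along these lines (the values and the
memcached URL are examples, not defaults):

    [token]
    caching = True
    # Cache the revocation list for five minutes instead of the default hour.
    revocation_cache_time = 300

    [cache]
    enabled = True
    backend = dogpile.cache.memcached
    # backend_argument takes name:value pairs; the URL below is a placeholder.
    backend_argument = url:127.0.0.1:11211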


@ -160,6 +160,13 @@
# mode e.g. kerberos or x509 to require binding to that authentication.
# enforce_token_bind = permissive
# Token-specific caching toggle. This has no effect unless the global caching
# option is set to True.
# caching = True
# Revocation-List specific cache time-to-live (TTL) in seconds.
# revocation_cache_time = 3600
[cache]
# Global cache functionality toggle.
# enabled = False
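
A small stand-in sketch of the gating implied by these two toggles (illustrative
Python only, not Keystone's actual should_cache_fn helper): values are cached
only when both the global [cache] switch and the per-subsystem [token] switch
are enabled.

    # Tiny stand-in for oslo.config sections; names here are placeholders.
    class _Section(object):
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)

    CONF = _Section(cache=_Section(enabled=False),
                    token=_Section(caching=True))

    def should_cache_fn(section):
        def should_cache(value):
            # Both the global toggle and the per-section toggle must be True.
            return CONF.cache.enabled and getattr(CONF, section).caching
        return should_cache

    # With [cache] enabled = False, nothing is cached even though
    # [token] caching = True.
    assert should_cache_fn('token')('some-value') is False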


@ -21,7 +21,7 @@ import dogpile.cache
from dogpile.cache import proxy
from dogpile.cache import util
from keystone.common import config
from keystone import config
from keystone import exception
from keystone.openstack.common import importutils
from keystone.openstack.common import log
@ -31,6 +31,7 @@ CONF = config.CONF
LOG = log.getLogger(__name__)
REGION = dogpile.cache.make_region()
make_region = dogpile.cache.make_region
on_arguments = REGION.cache_on_arguments
dogpile.cache.register_backend(
@ -71,17 +72,17 @@ class DebugProxy(proxy.ProxyBackend):
self.proxied.delete_multi(keys)
def build_cache_config(conf):
def build_cache_config():
"""Build the cache region dictionary configuration.
:param conf: configuration object for keystone
:returns: dict
"""
prefix = conf.cache.config_prefix
prefix = CONF.cache.config_prefix
conf_dict = {}
conf_dict['%s.backend' % prefix] = conf.cache.backend
conf_dict['%s.expiration_time' % prefix] = conf.cache.expiration_time
for argument in conf.cache.backend_argument:
conf_dict['%s.backend' % prefix] = CONF.cache.backend
conf_dict['%s.expiration_time' % prefix] = CONF.cache.expiration_time
for argument in CONF.cache.backend_argument:
try:
(argname, argvalue) = argument.split(':', 1)
except ValueError:
@ -98,33 +99,28 @@ def build_cache_config(conf):
return conf_dict
def configure_cache_region(conf, region=None):
def configure_cache_region(region):
"""Configure a cache region.
:param conf: global keystone configuration object
:param region: optional CacheRegion object, if not provided a new region
will be instantiated
:raises: exception.ValidationError
:returns: dogpile.cache.CacheRegion
"""
if region is not None and not isinstance(region,
dogpile.cache.CacheRegion):
if not isinstance(region, dogpile.cache.CacheRegion):
raise exception.ValidationError(
_('region not type dogpile.cache.CacheRegion'))
if region is None:
region = dogpile.cache.make_region()
if 'backend' not in region.__dict__:
# NOTE(morganfainberg): this is how you tell if a region is configured.
# There is a request logged with dogpile.cache upstream to make this
# easier / less ugly.
config_dict = build_cache_config(conf)
config_dict = build_cache_config()
region.configure_from_config(config_dict,
'%s.' % conf.cache.config_prefix)
'%s.' % CONF.cache.config_prefix)
if conf.cache.debug_cache_backend:
if CONF.cache.debug_cache_backend:
region.wrap(DebugProxy)
# NOTE(morganfainberg): if the backend requests the use of a
@ -138,7 +134,7 @@ def configure_cache_region(conf, region=None):
if CONF.cache.use_key_mangler:
region.key_mangler = util.sha1_mangle_key
for class_path in conf.cache.proxies:
for class_path in CONF.cache.proxies:
# NOTE(morganfainberg): if we have any proxy wrappers, we should
# ensure they are added to the cache region's backend. Since
# configure_from_config doesn't handle the wrap argument, we need
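
For reference, the dictionary that build_cache_config() assembles follows
dogpile.cache's configure_from_config() conventions; a minimal standalone
sketch (the prefix and values are examples, assuming the default
config_prefix of cache.keystone):

    import dogpile.cache

    config_dict = {
        'cache.keystone.backend': 'dogpile.cache.memory',
        'cache.keystone.expiration_time': 600,
    }

    # The prefix argument tells dogpile which keys in the dict belong to it.
    region = dogpile.cache.make_region()
    region.configure_from_config(config_dict, 'cache.keystone.')

    region.set('key', 'value')
    assert region.get('key') == 'value'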


@ -70,7 +70,9 @@ FILE_OPTIONS = {
cfg.IntOpt('expiration', default=86400),
cfg.StrOpt('provider', default=None),
cfg.StrOpt('driver',
default='keystone.token.backends.sql.Token')],
default='keystone.token.backends.sql.Token'),
cfg.BoolOpt('caching', default=True),
cfg.IntOpt('revocation_cache_time', default=3600)],
'cache': [
cfg.StrOpt('config_prefix', default='cache.keystone'),
cfg.IntOpt('expiration_time', default=600),


@ -41,7 +41,7 @@ LOG = logging.getLogger(__name__)
# Ensure the cache is configured and built before we instantiate the managers
cache.configure_cache_region(CONF, cache.REGION)
cache.configure_cache_region(cache.REGION)
# Ensure that the identity driver is created before the assignment manager.
# The default assignment driver is determined by the identity driver, so the


@ -218,6 +218,8 @@ class TestCase(NoModule, unittest.TestCase):
super(TestCase, self).setUp()
self.config([etcdir('keystone.conf.sample'),
testsdir('test_overrides.conf')])
# ensure the cache region instance is set up
cache.configure_cache_region(cache.REGION)
self.mox = mox.Mox()
self.opt(policy_file=etcdir('policy.json'))
self.stubs = stubout.StubOutForTesting()
@ -233,6 +235,10 @@ class TestCase(NoModule, unittest.TestCase):
self.stubs.UnsetAll()
self.stubs.SmartUnsetAll()
self.mox.VerifyAll()
# NOTE(morganfainberg): The only way to reconfigure the
# CacheRegion object on each setUp() call is to remove the
# .backend property.
del cache.REGION.backend
super(TestCase, self).tearDown()
finally:
for path in self._paths:
@ -277,16 +283,6 @@ class TestCase(NoModule, unittest.TestCase):
setattr(self, manager_name, manager.Manager())
# NOTE(morganfainberg): ensure the cache region is setup. It is safe
# to call configure_cache_region on the same region multiple times.
# The region wont be configured more than one time.
cache.configure_cache_region(CONF, cache.REGION)
# Invalidate all cache between tests. This should probably be extended
# to purge the cache_region's backend as well to avoid odd memory bloat
# during a given test sequence since we use dogpile.cache.memory.
cache.REGION.invalidate()
dependency.resolve_future_dependencies()
def load_fixtures(self, fixtures):
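
The ``del cache.REGION.backend`` teardown above works because a dogpile.cache
region will not accept a second configure call while its backend attribute is
present; a standalone sketch of that reset pattern (not Keystone code):

    import dogpile.cache

    region = dogpile.cache.make_region()
    region.configure('dogpile.cache.memory')   # first configuration

    del region.backend                         # return to the unconfigured state
    region.configure('dogpile.cache.memory')   # now safe to configure again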


@ -2403,6 +2403,41 @@ class TokenTests(object):
self.assertEqual(len(tokens), 1)
self.assertIn(token_id, tokens)
def test_revocation_list_cache(self):
expire_time = timeutils.utcnow() + datetime.timedelta(minutes=10)
token_id = uuid.uuid4().hex
token_data = {'id_hash': token_id, 'id': token_id, 'a': 'b',
'expires': expire_time,
'trust_id': None,
'user': {'id': 'testuserid'}}
token2_id = uuid.uuid4().hex
token2_data = {'id_hash': token2_id, 'id': token2_id, 'a': 'b',
'expires': expire_time,
'trust_id': None,
'user': {'id': 'testuserid'}}
# Create 2 Tokens.
self.token_api.create_token(token_id, token_data)
self.token_api.create_token(token2_id, token2_data)
# Verify the revocation list is empty.
self.assertEquals([], self.token_api.list_revoked_tokens())
# Delete a token directly, bypassing the manager.
self.token_api.driver.delete_token(token_id)
# Verify the revocation list is still empty.
self.assertEquals([], self.token_api.list_revoked_tokens())
# Invalidate the revocation list.
self.token_api.invalidate_revocation_list()
# Verify the deleted token is in the revocation list.
revoked_tokens = [x['id']
for x in self.token_api.list_revoked_tokens()]
self.assertIn(token_id, revoked_tokens)
# Delete the second token through the manager.
self.token_api.delete_token(token2_id)
revoked_tokens = [x['id']
for x in self.token_api.list_revoked_tokens()]
# Verify both tokens are in the revocation list.
self.assertIn(token_id, revoked_tokens)
self.assertIn(token2_id, revoked_tokens)
class TrustTests(object):
def create_sample_trust(self, new_id):


@ -18,7 +18,7 @@ from dogpile.cache import api
from dogpile.cache import proxy
from keystone.common import cache
from keystone.common import config
from keystone import config
from keystone.tests import core as test
@ -44,7 +44,8 @@ class TestProxyValue(object):
class CacheRegionTest(test.TestCase):
def setUp(self):
super(CacheRegionTest, self).setUp()
self.region = cache.configure_cache_region(CONF)
self.region = cache.make_region()
cache.configure_cache_region(self.region)
self.region.wrap(TestProxy)
def test_region_built_with_proxy_direct_cache_test(self):
@ -56,7 +57,8 @@ class CacheRegionTest(test.TestCase):
def test_cache_region_no_error_multiple_config(self):
"""Verify configuring the CacheRegion again doesn't error."""
cache.configure_cache_region(CONF, self.region)
cache.configure_cache_region(self.region)
cache.configure_cache_region(self.region)
def test_should_cache_fn(self):
"""Verify should_cache_fn generates a sane function."""
@ -84,7 +86,7 @@ class CacheRegionTest(test.TestCase):
CONF.cache.backend_argument = ['arg1:test', 'arg2:test:test',
'arg3.invalid']
config_dict = cache.build_cache_config(CONF)
config_dict = cache.build_cache_config()
self.assertEquals(
config_dict['test_prefix.backend'], CONF.cache.backend)
self.assertEquals(


@ -17,6 +17,7 @@ driver = keystone.token.backends.kvs.Token
[cache]
backend = dogpile.cache.memory
enabled = True
debug_cache_backend = True
[oauth1]
driver = keystone.contrib.oauth1.backends.kvs.OAuth1


@ -5,6 +5,7 @@ from lxml import etree
import webtest
from keystone import auth
from keystone.common import cache
from keystone.common import serializer
from keystone import config
from keystone.openstack.common import timeutils
@ -47,6 +48,9 @@ class RestfulTestCase(test_content_types.RestfulTestCase):
self.config(self.config_files())
self.setup_database()
# ensure the cache region instance is set up
cache.configure_cache_region(cache.REGION)
self.load_backends()
self.public_app = webtest.TestApp(
@ -126,6 +130,10 @@ class RestfulTestCase(test_content_types.RestfulTestCase):
self.public_server = None
self.admin_server = None
self.teardown_database()
# NOTE(morganfainberg): The only way to reconfigure the
# CacheRegion object on each setUp() call is to remove the
# .backend property.
del cache.REGION.backend
# need to reset the plug-ins
auth.controllers.AUTH_METHODS = {}
#drop the policy rules


@ -19,6 +19,7 @@
import copy
import datetime
from keystone.common import cache
from keystone.common import cms
from keystone.common import dependency
from keystone.common import manager
@ -29,8 +30,8 @@ from keystone.openstack.common import timeutils
CONF = config.CONF
LOG = logging.getLogger(__name__)
SHOULD_CACHE = cache.should_cache_fn('token')
def default_expire_time():
@ -123,7 +124,27 @@ class Manager(manager.Manager):
return self.driver.create_token(self._unique_id(token_id), data_copy)
def delete_token(self, token_id):
return self.driver.delete_token(self._unique_id(token_id))
self.driver.delete_token(self._unique_id(token_id))
self.invalidate_revocation_list()
def delete_tokens(self, user_id, tenant_id=None, trust_id=None,
consumer_id=None):
self.driver.delete_tokens(user_id, tenant_id, trust_id, consumer_id)
self.invalidate_revocation_list()
@cache.on_arguments(should_cache_fn=SHOULD_CACHE,
expiration_time=CONF.token.revocation_cache_time)
def list_revoked_tokens(self):
return self.driver.list_revoked_tokens()
def invalidate_revocation_list(self):
# NOTE(morganfainberg): the revoked-token list should always be kept in
# memory; calling refresh() here, instead of invalidate(), ensures a
# cache hit the next time list_revoked_tokens is called. This is an
# exception to the usual invalidation pattern. Note that ``self`` needs
# to be passed to refresh() because of the way the invalidation/refresh
# methods determine cache keys.
self.list_revoked_tokens.refresh(self)
class Driver(object):
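
The refresh()-versus-invalidate() behaviour referred to in the NOTE above comes
from dogpile.cache's cache_on_arguments decorator; here is a rough standalone
sketch of the pattern (the region, Manager class, and _revoked list are
placeholders, not Keystone code):

    import dogpile.cache

    region = dogpile.cache.make_region().configure('dogpile.cache.memory')

    class Manager(object):
        def __init__(self):
            self._revoked = []

        @region.cache_on_arguments(expiration_time=3600)
        def list_revoked_tokens(self):
            # Stand-in for the driver call whose result gets cached.
            return list(self._revoked)

        def delete_token(self, token_id):
            self._revoked.append({'id': token_id})
            # refresh() recomputes and re-stores the value right away, so the
            # next list_revoked_tokens() call is a warm cache hit; invalidate()
            # would merely drop the cached entry. ``self`` is part of the
            # generated cache key, so it must be passed explicitly.
            self.list_revoked_tokens.refresh(self)

    mgr = Manager()
    mgr.delete_token('abc123')
    assert [t['id'] for t in mgr.list_revoked_tokens()] == ['abc123']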