Merge "Implement caching for Tokens and Token Validation"

Jenkins 2013-08-30 19:55:47 +00:00 committed by Gerrit Code Review
commit 292037460e
8 changed files with 121 additions and 31 deletions

View File

@@ -255,10 +255,15 @@ behavior is that subsystem caching is enabled, but the global toggle is set to d
 Current keystone systems that have caching capabilities:

 * ``token``
+  The token system has a separate ``cache_time`` configuration option that
+  can be set to a value above or below the global ``expiration_time`` default,
+  allowing for different caching behavior from the other systems in ``Keystone``.
+  This option is set in the ``[token]`` section of the configuration file.

 The Token Revocation List cache time is handled by the configuration option
 ``revocation_cache_time`` in the ``[token]`` section. The revocation
-list is refreshed whenever a token is revoked, and sees significantly more
-requests than specific tokens or token validation of specific tokens will see.
+list is refreshed whenever a token is revoked. It typically sees significantly
+more requests than specific token retrievals or token validation calls.

 For more information about the different backends (and configuration options):

 * `dogpile.cache.backends.memory`_
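
To make the relationship between these options concrete, a deployment that wants validated tokens cached for five minutes while keeping the global ten-minute default for everything else might use settings along these lines. This is a sketch only: the values are illustrative, and the ``[cache] enabled`` toggle name is an assumption based on the "global toggle" described above rather than something quoted from this change.

    [cache]
    enabled = True
    # Global default TTL, in seconds, for cached data.
    expiration_time = 600

    [token]
    caching = True
    # Token-specific override of the global expiration_time.
    cache_time = 300
    # Refresh interval for the cached Token Revocation List.
    revocation_cache_time = 3600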

View File

@@ -164,6 +164,9 @@
 # option is set to True
 # caching = True
+# Token specific cache time-to-live (TTL) in seconds.
+# cache_time =
 # Revocation-List specific cache time-to-live (TTL) in seconds.
 # revocation_cache_time = 3600

View File

@@ -43,18 +43,18 @@ class DebugProxy(proxy.ProxyBackend):
     """Extra Logging ProxyBackend."""

     def get(self, key):
         value = self.proxied.get(key)
-        msg = _('CACHE_GET: Key: "%(key)s)" "%(value)s"')
+        msg = _('CACHE_GET: Key: "%(key)s" Value: "%(value)s"')
         LOG.debug(msg % {'key': key, 'value': value})
         return value

     def get_multi(self, keys):
         values = self.proxied.get_multi(keys)
-        msg = _('CACHE_GET_MULTI: "%(key)s" "%(value)s"')
+        msg = _('CACHE_GET_MULTI: "%(keys)s" Values: "%(values)s"')
         LOG.debug(msg % {'keys': keys, 'values': values})
         return values

     def set(self, key, value):
-        msg = _('CACHE_SET: Key: %(key)s Value: %(value)s')
+        msg = _('CACHE_SET: Key: "%(key)s" Value: "%(value)s"')
         LOG.debug(msg % {'key': key, 'value': value})
         return self.proxied.set(key, value)
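
For context on the class touched here: ``DebugProxy`` follows dogpile.cache's proxy-backend pattern, in which a ``ProxyBackend`` subclass wraps the real backend and intercepts its ``get``/``set`` calls. The sketch below shows that pattern in isolation with a hypothetical ``CountingProxy`` and an in-memory region; none of it is code from this change.

    from dogpile.cache import make_region
    from dogpile.cache.api import NO_VALUE
    from dogpile.cache.proxy import ProxyBackend


    class CountingProxy(ProxyBackend):
        """Hypothetical proxy that counts hits and misses on the wrapped backend."""

        def __init__(self, *args, **kwargs):
            super(CountingProxy, self).__init__(*args, **kwargs)
            self.hits = 0
            self.misses = 0

        def get(self, key):
            # Delegate to the real backend, exactly like DebugProxy does.
            value = self.proxied.get(key)
            if value is NO_VALUE:
                self.misses += 1
            else:
                self.hits += 1
            return value


    # Proxies listed in ``wrap`` are layered around the configured backend.
    counter = CountingProxy()
    region = make_region().configure('dogpile.cache.memory', wrap=[counter])
    region.set('token-abc123', {'id': 'abc123'})
    region.get('token-abc123')           # served through CountingProxy.get
    print(counter.hits, counter.misses)  # 1 0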

View File

@ -72,7 +72,8 @@ FILE_OPTIONS = {
cfg.StrOpt('driver', cfg.StrOpt('driver',
default='keystone.token.backends.sql.Token'), default='keystone.token.backends.sql.Token'),
cfg.BoolOpt('caching', default=True), cfg.BoolOpt('caching', default=True),
cfg.IntOpt('revocation_cache_time', default=3600)], cfg.IntOpt('revocation_cache_time', default=3600),
cfg.IntOpt('cache_time', default=None)],
'cache': [ 'cache': [
cfg.StrOpt('config_prefix', default='cache.keystone'), cfg.StrOpt('config_prefix', default='cache.keystone'),
cfg.IntOpt('expiration_time', default=600), cfg.IntOpt('expiration_time', default=600),

View File

@@ -42,15 +42,9 @@ class Token(kvs.Base, token.Driver):
     def get_token(self, token_id):
         try:
             ref = self.db.get('token-%s' % token_id)
-        except exception.NotFound:
-            raise exception.TokenNotFound(token_id=token_id)
-        now = timeutils.utcnow()
-        expiry = ref['expires']
-        if expiry is None:
-            raise exception.TokenNotFound(token_id=token_id)
-        if expiry > now:
             return copy.deepcopy(ref)
-        else:
+        except Exception:
+            # On any issues here, Token is not found.
             raise exception.TokenNotFound(token_id=token_id)

     def create_token(self, token_id, data):

View File

@@ -15,7 +15,6 @@
 # under the License.

 import copy
-import datetime

 from keystone.common import sql
 from keystone import exception

@@ -45,13 +44,8 @@ class Token(sql.Base, token.Driver):
             raise exception.TokenNotFound(token_id=token_id)
         session = self.get_session()
         token_ref = session.query(TokenModel).get(token_id)
-        now = datetime.datetime.utcnow()
         if not token_ref or not token_ref.valid:
             raise exception.TokenNotFound(token_id=token_id)
-        if not token_ref.expires:
-            raise exception.TokenNotFound(token_id=token_id)
-        if now >= token_ref.expires:
-            raise exception.TokenNotFound(token_id=token_id)
         return token_ref.to_dict()

     def create_token(self, token_id, data):

View File

@@ -91,6 +91,7 @@ def validate_auth_info(self, user_ref, tenant_ref):
         raise exception.Unauthorized(msg)


+@dependency.requires('token_provider_api')
 @dependency.provider('token_api')
 class Manager(manager.Manager):
     """Default pivot point for the Token backend.

@@ -103,7 +104,7 @@ class Manager(manager.Manager):
     def __init__(self):
         super(Manager, self).__init__(CONF.token.driver)

-    def _unique_id(self, token_id):
+    def unique_id(self, token_id):
         """Return a unique ID for a token.

         The returned value is useful as the primary key of a database table,

@@ -115,21 +116,55 @@ class Manager(manager.Manager):
         """
         return cms.cms_hash_token(token_id)

+    def _assert_valid(self, token_id, token_ref):
+        """Raise TokenNotFound if the token is expired."""
+        current_time = timeutils.normalize_time(timeutils.utcnow())
+        expires = token_ref.get('expires')
+        if not expires or current_time > timeutils.normalize_time(expires):
+            raise exception.TokenNotFound(token_id=token_id)
+
     def get_token(self, token_id):
-        return self.driver.get_token(self._unique_id(token_id))
+        unique_id = self.unique_id(token_id)
+        token_ref = self._get_token(unique_id)
+        # NOTE(morganfainberg): Lift expired checking to the manager, there is
+        # no reason to make the drivers implement this check. With caching,
+        # self._get_token could return an expired token. Make sure we behave
+        # as expected and raise TokenNotFound on those instances.
+        self._assert_valid(token_id, token_ref)
+        return token_ref
+
+    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
+                        expiration_time=CONF.token.cache_time)
+    def _get_token(self, token_id):
+        # Only ever use the "unique" id in the cache key.
+        return self.driver.get_token(token_id)

     def create_token(self, token_id, data):
+        unique_id = self.unique_id(token_id)
         data_copy = copy.deepcopy(data)
-        data_copy['id'] = self._unique_id(token_id)
-        return self.driver.create_token(self._unique_id(token_id), data_copy)
+        data_copy['id'] = unique_id
+        ret = self.driver.create_token(unique_id, data_copy)
+        if SHOULD_CACHE(ret):
+            # NOTE(morganfainberg): when doing a cache set, you must pass the
+            # same arguments through, the same as invalidate (this includes
+            # "self"). First argument is always the value to be cached
+            self._get_token.set(ret, self, unique_id)
+        return ret

     def delete_token(self, token_id):
-        self.driver.delete_token(self._unique_id(token_id))
+        unique_id = self.unique_id(token_id)
+        self.driver.delete_token(unique_id)
+        self._invalidate_individual_token_cache(unique_id)
         self.invalidate_revocation_list()

     def delete_tokens(self, user_id, tenant_id=None, trust_id=None,
                       consumer_id=None):
+        token_list = self.driver.list_tokens(user_id, tenant_id, trust_id,
+                                             consumer_id)
         self.driver.delete_tokens(user_id, tenant_id, trust_id, consumer_id)
+        for token_id in token_list:
+            unique_id = self.unique_id(token_id)
+            self._invalidate_individual_token_cache(unique_id, tenant_id)
         self.invalidate_revocation_list()

     @cache.on_arguments(should_cache_fn=SHOULD_CACHE,

@@ -146,6 +181,19 @@ class Manager(manager.Manager):
         # determining cache-keys.
         self.list_revoked_tokens.refresh(self)

+    def _invalidate_individual_token_cache(self, token_id, belongs_to=None):
+        # NOTE(morganfainberg): invalidate takes the exact same arguments as
+        # the normal method, this means we need to pass "self" in (which gets
+        # stripped off).
+        # FIXME(morganfainberg): Does this cache actually need to be
+        # invalidated? We maintain a cached revocation list, which should be
+        # consulted before accepting a token as valid. For now we will
+        # do the explicit individual token invalidation.
+        self._get_token.invalidate(self, token_id)
+        self.token_provider_api.invalidate_individual_token_cache(token_id,
+                                                                  belongs_to)
+

 class Driver(object):
     """Interface description for a Token driver."""

View File

@@ -17,6 +17,7 @@
 """Token provider interface."""

+from keystone.common import cache
 from keystone.common import dependency
 from keystone.common import manager
 from keystone import config

@@ -27,6 +28,7 @@ from keystone.openstack.common import timeutils

 CONF = config.CONF
 LOG = logging.getLogger(__name__)
+SHOULD_CACHE = cache.should_cache_fn('token')


 # supported token versions

@@ -101,17 +103,26 @@ class Manager(manager.Manager):
         super(Manager, self).__init__(self.get_token_provider())

     def validate_token(self, token_id, belongs_to=None):
-        token = self.driver.validate_token(token_id, belongs_to)
+        unique_id = self.token_api.unique_id(token_id)
+        # NOTE(morganfainberg): Ensure we never use the long-form token_id
+        # (PKI) as part of the cache_key.
+        token = self._validate_token(unique_id, belongs_to)
         self._is_valid_token(token)
         return token

     def validate_v2_token(self, token_id, belongs_to=None):
-        token = self.driver.validate_v2_token(token_id, belongs_to)
+        unique_id = self.token_api.unique_id(token_id)
+        # NOTE(morganfainberg): Ensure we never use the long-form token_id
+        # (PKI) as part of the cache_key.
+        token = self._validate_v2_token(unique_id, belongs_to)
         self._is_valid_token(token)
         return token

     def validate_v3_token(self, token_id):
-        token = self.driver.validate_v3_token(token_id)
+        unique_id = self.token_api.unique_id(token_id)
+        # NOTE(morganfainberg): Ensure we never use the long-form token_id
+        # (PKI) as part of the cache_key.
+        token = self._validate_v3_token(unique_id)
         self._is_valid_token(token)
         return token

@@ -123,7 +134,10 @@ class Manager(manager.Manager):
         :returns: None
         :raises: keystone.exception.Unauthorized
         """
-        self.validate_v2_token(token_id)
+        # NOTE(morganfainberg): Ensure we never use the long-form token_id
+        # (PKI) as part of the cache_key.
+        unique_id = self.token_api.unique_id(token_id)
+        self.validate_v2_token(unique_id, belongs_to=belongs_to)

     def check_v3_token(self, token_id):
         """Check the validity of the given V3 token.

@@ -132,11 +146,28 @@ class Manager(manager.Manager):
         :returns: None
         :raises: keystone.exception.Unauthorized
         """
-        self.validate_v3_token(token_id)
+        # NOTE(morganfainberg): Ensure we never use the long-form token_id
+        # (PKI) as part of the cache_key.
+        unique_id = self.token_api.unique_id(token_id)
+        self.validate_v3_token(unique_id)
+
+    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
+                        expiration_time=CONF.token.cache_time)
+    def _validate_token(self, token_id, belongs_to=None):
+        return self.driver.validate_token(token_id, belongs_to)
+
+    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
+                        expiration_time=CONF.token.cache_time)
+    def _validate_v2_token(self, token_id, belongs_to=None):
+        return self.driver.validate_v2_token(token_id, belongs_to)
+
+    @cache.on_arguments(should_cache_fn=SHOULD_CACHE,
+                        expiration_time=CONF.token.cache_time)
+    def _validate_v3_token(self, token_id):
+        return self.driver.validate_v3_token(token_id)

     def _is_valid_token(self, token):
         # Verify the token has not expired.
         current_time = timeutils.normalize_time(timeutils.utcnow())
         try:

@@ -159,6 +190,20 @@ class Manager(manager.Manager):
             # Token is expired, we have a malformed token, or something went wrong.
             raise exception.Unauthorized(_('Failed to validate token'))

+    def invalidate_individual_token_cache(self, token_id, belongs_to=None):
+        # NOTE(morganfainberg): invalidate takes the exact same arguments as
+        # the normal method, this means we need to pass "self" in (which gets
+        # stripped off).
+        # FIXME(morganfainberg): Does this cache actually need to be
+        # invalidated? We maintain a cached revocation list, which should be
+        # consulted before accepting a token as valid. For now we will
+        # do the explicit individual token invalidation.
+        self._validate_v3_token.invalidate(self, token_id)
+        self._validate_v2_token.invalidate(self, token_id)
+        self._validate_v2_token.invalidate(self, token_id, belongs_to)
+        self._validate_token.invalidate(self, token_id, belongs_to)
+

 class Provider(object):
     """Interface description for a Token provider."""