Move files into the right spot and get tests working

* Move files from the keystone tree into the appropriate folders
* Move tests from keystone into the appropriate folders
* Fix imports to use the new module locations
* Add an _i18n.py module to help with localization
* Add a BaseTestCase class
* Add an exception module

This is just enough to run the tests, though some tests are commented
out with a FIXME to be fixed in later commits.

A few additional tests do not work with py34; they are skipped using
testtools.skipIf statements.
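
For reference, the skip pattern used in test_cache.py below looks like
this (ExampleTest is a hypothetical illustration, not part of this
change):

    import six
    import testtools
    from oslotest import base

    class ExampleTest(base.BaseTestCase):
        @testtools.skipIf(six.PY3, 'FIXME: does not work on Python 3.x')
        def test_py2_only_behaviour(self):
            self.assertTrue(True)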

Change-Id: Ib494d2a960cab5959c2adaca1eb0739fe65d14f1
Davanum Srinivas 2015-06-18 17:59:17 -04:00
parent b6dfec2db2
commit 864dfacced
16 changed files with 657 additions and 395 deletions

View File

@@ -1,15 +0,0 @@
# Copyright 2013 Metacloud
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.common.cache.core import * # noqa

View File

@@ -1,322 +0,0 @@
# Copyright 2013 Metacloud
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import time
import uuid
from dogpile.cache import api
from dogpile.cache import proxy
import mock
from oslo_config import cfg
from keystone.common import cache
from keystone import exception
from keystone.tests import unit as tests
CONF = cfg.CONF
NO_VALUE = api.NO_VALUE
def _copy_value(value):
if value is not NO_VALUE:
value = copy.deepcopy(value)
return value
# NOTE(morganfainberg): WARNING - It is not recommended to use the Memory
# backend for dogpile.cache in a real deployment under any circumstances. The
# backend does no cleanup of expired values and therefore will leak memory. The
# backend is not implemented in a way to share data across processes (e.g.
# Keystone in HTTPD). This proxy is a hack to get around the lack of isolation
# of values in memory. Currently it blindly stores and retrieves the values
# from the cache, and modifications to dicts/lists/etc returned can result in
# changes to the cached values. In short, do not use the dogpile.cache.memory
# backend unless you are running tests or expecting odd/strange results.
class CacheIsolatingProxy(proxy.ProxyBackend):
"""Proxy that forces a memory copy of stored values.
The default in-memory cache-region does not perform a copy on values it
is meant to cache. Therefore if the value is modified after set or after
get, the cached value also is modified. This proxy does a copy as the last
thing before storing data.
"""
def get(self, key):
return _copy_value(self.proxied.get(key))
def set(self, key, value):
self.proxied.set(key, _copy_value(value))
class TestProxy(proxy.ProxyBackend):
def get(self, key):
value = _copy_value(self.proxied.get(key))
if value is not NO_VALUE:
if isinstance(value[0], TestProxyValue):
value[0].cached = True
return value
class TestProxyValue(object):
def __init__(self, value):
self.value = value
self.cached = False
class CacheRegionTest(tests.TestCase):
def setUp(self):
super(CacheRegionTest, self).setUp()
self.region = cache.make_region()
cache.configure_cache_region(self.region)
self.region.wrap(TestProxy)
self.test_value = TestProxyValue('Decorator Test')
def _add_test_caching_option(self):
self.config_fixture.register_opt(
cfg.BoolOpt('caching', default=True), group='cache')
def _get_cacheable_function(self):
with mock.patch.object(cache.REGION, 'cache_on_arguments',
self.region.cache_on_arguments):
memoize = cache.get_memoization_decorator(section='cache')
@memoize
def cacheable_function(value):
return value
return cacheable_function
def test_region_built_with_proxy_direct_cache_test(self):
# Verify cache regions are properly built with proxies.
test_value = TestProxyValue('Direct Cache Test')
self.region.set('cache_test', test_value)
cached_value = self.region.get('cache_test')
self.assertTrue(cached_value.cached)
def test_cache_region_no_error_multiple_config(self):
# Verify configuring the CacheRegion again doesn't error.
cache.configure_cache_region(self.region)
cache.configure_cache_region(self.region)
def _get_cache_fallthrough_fn(self, cache_time):
with mock.patch.object(cache.REGION, 'cache_on_arguments',
self.region.cache_on_arguments):
memoize = cache.get_memoization_decorator(
section='cache',
expiration_section='assignment')
class _test_obj(object):
def __init__(self, value):
self.test_value = value
@memoize
def get_test_value(self):
return self.test_value
def _do_test(value):
test_obj = _test_obj(value)
# Ensure the value has been cached
test_obj.get_test_value()
# Get the now cached value
cached_value = test_obj.get_test_value()
self.assertTrue(cached_value.cached)
self.assertEqual(value.value, cached_value.value)
self.assertEqual(cached_value.value, test_obj.test_value.value)
# Change the underlying value on the test object.
test_obj.test_value = TestProxyValue(uuid.uuid4().hex)
self.assertEqual(cached_value.value,
test_obj.get_test_value().value)
# override the system time to ensure the non-cached new value
# is returned
new_time = time.time() + (cache_time * 2)
with mock.patch.object(time, 'time',
return_value=new_time):
overridden_cache_value = test_obj.get_test_value()
self.assertNotEqual(cached_value.value,
overridden_cache_value.value)
self.assertEqual(test_obj.test_value.value,
overridden_cache_value.value)
return _do_test
def test_cache_no_fallthrough_expiration_time_fn(self):
# Since we do not re-configure the cache region, for ease of testing
# this value is set the same as the expiration_time default in the
# [cache] section
cache_time = 600
expiration_time = cache.get_expiration_time_fn('role')
do_test = self._get_cache_fallthrough_fn(cache_time)
# Run the test with the assignment cache_time value
self.config_fixture.config(cache_time=cache_time,
group='role')
test_value = TestProxyValue(uuid.uuid4().hex)
self.assertEqual(cache_time, expiration_time())
do_test(value=test_value)
def test_cache_fallthrough_expiration_time_fn(self):
# Since we do not re-configure the cache region, for ease of testing
# this value is set the same as the expiration_time default in the
# [cache] section
cache_time = 599
expiration_time = cache.get_expiration_time_fn('role')
do_test = self._get_cache_fallthrough_fn(cache_time)
# Run the test with the assignment cache_time value set to None and
# the global value set.
self.config_fixture.config(cache_time=None, group='role')
test_value = TestProxyValue(uuid.uuid4().hex)
self.assertIsNone(expiration_time())
do_test(value=test_value)
def test_should_cache_fn_global_cache_enabled(self):
# Verify should_cache_fn generates a sane function for subsystem and
# functions as expected with caching globally enabled.
cacheable_function = self._get_cacheable_function()
self.config_fixture.config(group='cache', enabled=True)
cacheable_function(self.test_value)
cached_value = cacheable_function(self.test_value)
self.assertTrue(cached_value.cached)
def test_should_cache_fn_global_cache_disabled(self):
# Verify should_cache_fn generates a sane function for subsystem and
# functions as expected with caching globally disabled.
cacheable_function = self._get_cacheable_function()
self.config_fixture.config(group='cache', enabled=False)
cacheable_function(self.test_value)
cached_value = cacheable_function(self.test_value)
self.assertFalse(cached_value.cached)
def test_should_cache_fn_global_cache_disabled_section_cache_enabled(self):
# Verify should_cache_fn generates a sane function for subsystem and
# functions as expected with caching globally disabled and the specific
# section caching enabled.
cacheable_function = self._get_cacheable_function()
self._add_test_caching_option()
self.config_fixture.config(group='cache', enabled=False)
self.config_fixture.config(group='cache', caching=True)
cacheable_function(self.test_value)
cached_value = cacheable_function(self.test_value)
self.assertFalse(cached_value.cached)
def test_should_cache_fn_global_cache_enabled_section_cache_disabled(self):
# Verify should_cache_fn generates a sane function for subsystem and
# functions as expected with caching globally enabled and the specific
# section caching disabled.
cacheable_function = self._get_cacheable_function()
self._add_test_caching_option()
self.config_fixture.config(group='cache', enabled=True)
self.config_fixture.config(group='cache', caching=False)
cacheable_function(self.test_value)
cached_value = cacheable_function(self.test_value)
self.assertFalse(cached_value.cached)
def test_should_cache_fn_global_cache_enabled_section_cache_enabled(self):
# Verify should_cache_fn generates a sane function for subsystem and
# functions as expected with caching globally enabled and the specific
# section caching enabled.
cacheable_function = self._get_cacheable_function()
self._add_test_caching_option()
self.config_fixture.config(group='cache', enabled=True)
self.config_fixture.config(group='cache', caching=True)
cacheable_function(self.test_value)
cached_value = cacheable_function(self.test_value)
self.assertTrue(cached_value.cached)
def test_cache_dictionary_config_builder(self):
"""Validate we build a sane dogpile.cache dictionary config."""
self.config_fixture.config(group='cache',
config_prefix='test_prefix',
backend='some_test_backend',
expiration_time=86400,
backend_argument=['arg1:test',
'arg2:test:test',
'arg3.invalid'])
config_dict = cache.build_cache_config()
self.assertEqual(
CONF.cache.backend, config_dict['test_prefix.backend'])
self.assertEqual(
CONF.cache.expiration_time,
config_dict['test_prefix.expiration_time'])
self.assertEqual('test', config_dict['test_prefix.arguments.arg1'])
self.assertEqual('test:test',
config_dict['test_prefix.arguments.arg2'])
self.assertNotIn('test_prefix.arguments.arg3', config_dict)
def test_cache_debug_proxy(self):
single_value = 'Test Value'
single_key = 'testkey'
multi_values = {'key1': 1, 'key2': 2, 'key3': 3}
self.region.set(single_key, single_value)
self.assertEqual(single_value, self.region.get(single_key))
self.region.delete(single_key)
self.assertEqual(NO_VALUE, self.region.get(single_key))
self.region.set_multi(multi_values)
cached_values = self.region.get_multi(multi_values.keys())
for value in multi_values.values():
self.assertIn(value, cached_values)
self.assertEqual(len(multi_values.values()), len(cached_values))
self.region.delete_multi(multi_values.keys())
for value in self.region.get_multi(multi_values.keys()):
self.assertEqual(NO_VALUE, value)
def test_configure_non_region_object_raises_error(self):
self.assertRaises(exception.ValidationError,
cache.configure_cache_region,
"bogus")
class CacheNoopBackendTest(tests.TestCase):
def setUp(self):
super(CacheNoopBackendTest, self).setUp()
self.region = cache.make_region()
cache.configure_cache_region(self.region)
def config_overrides(self):
super(CacheNoopBackendTest, self).config_overrides()
self.config_fixture.config(group='cache',
backend='keystone.common.cache.noop')
def test_noop_backend(self):
single_value = 'Test Value'
single_key = 'testkey'
multi_values = {'key1': 1, 'key2': 2, 'key3': 3}
self.region.set(single_key, single_value)
self.assertEqual(NO_VALUE, self.region.get(single_key))
self.region.set_multi(multi_values)
cached_values = self.region.get_multi(multi_values.keys())
self.assertEqual(len(cached_values), len(multi_values.values()))
for value in cached_values:
self.assertEqual(NO_VALUE, value)
# Delete should not raise exceptions
self.region.delete(single_key)
self.region.delete_multi(multi_values.keys())

oslo_cache/_i18n.py Normal file
View File

@@ -0,0 +1,35 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='oslo.cache')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
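
A minimal usage sketch for these markers (the report_missing helper is
illustrative only):

    from oslo_log import log
    from oslo_cache._i18n import _, _LW

    LOG = log.getLogger(__name__)

    def report_missing(name):
        # _LW() marks warning-level log text for translation
        LOG.warning(_LW('missing value: %s'), name)
        # _() marks user-facing message text
        return _('missing value: %s') % name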

View File

@@ -29,8 +29,8 @@ import memcache
from oslo_log import log
from six.moves import queue, zip
from keystone import exception
from keystone.i18n import _
from oslo_cache._i18n import _
from oslo_cache import exception
LOG = log.getLogger(__name__)

oslo_cache/_opts.py Normal file
View File

@@ -0,0 +1,131 @@
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
FILE_OPTIONS = {
'cache': [
cfg.StrOpt('config_prefix', default='cache.keystone',
help='Prefix for building the configuration dictionary '
'for the cache region. This should not need to be '
'changed unless there is another dogpile.cache '
'region with the same configuration name.'),
cfg.IntOpt('expiration_time', default=600,
help='Default TTL, in seconds, for any cached item in '
'the dogpile.cache region. This applies to any '
'cached method that doesn\'t have an explicit '
'cache expiration time defined for it.'),
# NOTE(morganfainberg): the dogpile.cache.memory backend is acceptable in devstack
# and other such single-process/thread deployments. Running
# dogpile.cache.memory in any other configuration has the same pitfalls
# as the KVS token backend. It is recommended that either Redis or
# Memcached are used as the dogpile backend for real workloads. To
# prevent issues with the memory cache ending up in "production"
# unintentionally, we register a no-op as the keystone default caching
# backend.
cfg.StrOpt('backend', default='oslo_cache.noop',
help='Dogpile.cache backend module. It is recommended '
'that Memcache with pooling '
'(keystone.cache.memcache_pool) or Redis '
'(dogpile.cache.redis) be used in production '
'deployments. Small workloads (single process) '
'like devstack can use the dogpile.cache.memory '
'backend.'),
cfg.MultiStrOpt('backend_argument', default=[], secret=True,
help='Arguments supplied to the backend module. '
'Specify this option once per argument to be '
'passed to the dogpile.cache backend. Example '
'format: "<argname>:<value>".'),
cfg.ListOpt('proxies', default=[],
help='Proxy classes to import that will affect the way '
'the dogpile.cache backend functions. See the '
'dogpile.cache documentation on '
'changing-backend-behavior.'),
cfg.BoolOpt('enabled', default=False,
help='Global toggle for all caching using the '
'should_cache_fn mechanism.'),
cfg.BoolOpt('debug_cache_backend', default=False,
help='Extra debugging from the cache backend (cache '
'keys, get/set/delete/etc calls). This is only '
'really useful if you need to see the specific '
'cache-backend get/set/delete calls with the '
'keys/values. Typically this should be left set '
'to false.'),
cfg.ListOpt('memcache_servers', default=['localhost:11211'],
help='Memcache servers in the format of "host:port".'
' (dogpile.cache.memcache and keystone.cache.memcache_pool'
' backends only).'),
cfg.IntOpt('memcache_dead_retry',
default=5 * 60,
help='Number of seconds memcached server is considered dead'
' before it is tried again. (dogpile.cache.memcache and'
' keystone.cache.memcache_pool backends only).'),
cfg.IntOpt('memcache_socket_timeout',
default=3,
help='Timeout in seconds for every call to a server.'
' (dogpile.cache.memcache and keystone.cache.memcache_pool'
' backends only).'),
cfg.IntOpt('memcache_pool_maxsize',
default=10,
help='Max total number of open connections to every'
' memcached server. (keystone.cache.memcache_pool backend'
' only).'),
cfg.IntOpt('memcache_pool_unused_timeout',
default=60,
help='Number of seconds a connection to memcached is held'
' unused in the pool before it is closed.'
' (keystone.cache.memcache_pool backend only).'),
cfg.IntOpt('memcache_pool_connection_get_timeout',
default=10,
help='Number of seconds that an operation will wait to get '
'a memcache client connection.'),
],
}
CONF = cfg.CONF
def configure(conf=None):
if conf is None:
conf = CONF
for section in FILE_OPTIONS:
for option in FILE_OPTIONS[section]:
conf.register_opt(option, group=section)
def list_opts():
"""Return a list of oslo_config options available in Keystone.
The returned list includes all oslo_config options which are registered as
the "FILE_OPTIONS" in keystone.common.config. This list will not include
the options from the oslo-incubator library or any options registered
dynamically at run time.
Each object in the list is a two element tuple. The first element of
each tuple is the name of the group under which the list of options in the
second element will be registered. A group name of None corresponds to the
[DEFAULT] group in config files.
This function is also discoverable via the 'oslo_config.opts' entry point
under the 'keystone.config.opts' namespace.
The purpose of this is to allow tools like the Oslo sample config file
generator to discover the options exposed to users by this library.
:returns: a list of (group_name, opts) tuples
"""
return list(FILE_OPTIONS.items())
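
A short sketch of how these options get registered and read, assuming
only the defaults defined above:

    from oslo_config import cfg
    from oslo_cache import _opts

    conf = cfg.ConfigOpts()
    _opts.configure(conf)
    print(conf.cache.backend)          # 'oslo_cache.noop' by default
    print(conf.cache.expiration_time)  # 600 seconds by default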

View File

@@ -20,7 +20,7 @@ import logging
from dogpile.cache.backends import memcached as memcached_backend
from keystone.common.cache import _memcache_pool
from oslo_cache import _memcache_pool
LOG = logging.getLogger(__name__)

View File

@@ -22,8 +22,8 @@ from oslo_utils import importutils
from oslo_utils import timeutils
import six
from keystone import exception
from keystone.i18n import _, _LW
from oslo_cache import exception
from oslo_cache._i18n import _, _LW
NO_VALUE = api.NO_VALUE

View File

@@ -21,8 +21,8 @@ from oslo_config import cfg
from oslo_log import log
from oslo_utils import importutils
from keystone import exception
from keystone.i18n import _, _LE
from oslo_cache import exception
from oslo_cache._i18n import _, _LE
CONF = cfg.CONF
@@ -31,18 +31,18 @@ LOG = log.getLogger(__name__)
make_region = dogpile.cache.make_region
dogpile.cache.register_backend(
'keystone.common.cache.noop',
'keystone.common.cache.backends.noop',
'oslo_cache.noop',
'oslo_cache.backends.noop',
'NoopCacheBackend')
dogpile.cache.register_backend(
'keystone.cache.mongo',
'keystone.common.cache.backends.mongo',
'oslo_cache.mongo',
'oslo_cache.backends.mongo',
'MongoCacheBackend')
dogpile.cache.register_backend(
'keystone.cache.memcache_pool',
'keystone.common.cache.backends.memcache_pool',
'oslo_cache.memcache_pool',
'oslo_cache.backends.memcache_pool',
'PooledMemcachedBackend')
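
Once registered, these names can be handed straight to dogpile. A
sketch, assuming this module lands at oslo_cache.core (as the test
imports below suggest) and that the noop backend, like its keystone
predecessor, takes no required arguments:

    from dogpile.cache import api
    from dogpile.cache import region as dp_region
    from oslo_cache import core  # noqa: registers the oslo_cache.* backends

    region = dp_region.make_region().configure('oslo_cache.noop')
    region.set('key', 'value')                 # no-op: nothing is stored
    assert region.get('key') is api.NO_VALUE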
@@ -105,7 +105,7 @@ def build_cache_config():
arg_key = '.'.join([prefix, 'arguments', argname])
conf_dict[arg_key] = argvalue
LOG.debug('Keystone Cache Config: %s', conf_dict)
LOG.debug('Oslo Cache Config: %s', conf_dict)
# NOTE(yorik-sar): these arguments will be used for memcache-related
# backends. Use setdefault for url to support old-style setting through
# backend_argument=url:127.0.0.1:11211
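
Each backend_argument entry is split on the first colon only, so a
value may itself contain colons (the tests below exercise
'arg2:test:test'). A sketch against the global CONF, again assuming
this module is oslo_cache.core:

    from oslo_config import cfg
    from oslo_cache import core as cache

    cfg.CONF.set_override('backend_argument',
                          ['url:127.0.0.1:11211'], group='cache')
    config_dict = cache.build_cache_config()
    # with the default config_prefix this includes:
    #   config_dict['cache.keystone.arguments.url'] == '127.0.0.1:11211'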

oslo_cache/exception.py Normal file
View File

@@ -0,0 +1,92 @@
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging as log
from oslo_config import cfg
from oslo_utils import encodeutils
import six
from oslo_cache._i18n import _, _LW
CONF = cfg.CONF
LOG = log.getLogger(__name__)
# Tests use this to make exception message format errors fatal
_FATAL_EXCEPTION_FORMAT_ERRORS = False
class Error(Exception):
"""Base error class.
Child classes should define an HTTP status code, title, and a
message_format.
"""
code = None
title = None
message_format = None
def __init__(self, message=None, **kwargs):
try:
message = self._build_message(message, **kwargs)
except KeyError:
# if you see this warning in your logs, please raise a bug report
if _FATAL_EXCEPTION_FORMAT_ERRORS:
raise
else:
LOG.warning(_LW('missing exception kwargs (programmer error)'))
message = self.message_format
super(Error, self).__init__(message)
def _build_message(self, message, **kwargs):
"""Builds and returns an exception message.
:raises: KeyError given insufficient kwargs
"""
if not message:
try:
message = self.message_format % kwargs
except UnicodeDecodeError:
try:
kwargs = {k: encodeutils.safe_decode(v)
for k, v in six.iteritems(kwargs)}
except UnicodeDecodeError:
# NOTE(jamielennox): This is the complete failure case
# at least by showing the template we have some idea
# of where the error is coming from
message = self.message_format
else:
message = self.message_format % kwargs
return message
class ValidationError(Error):
message_format = _("Expecting to find %(attribute)s in %(target)s -"
" the server could not comply with the request"
" since it is either malformed or otherwise"
" incorrect. The client is assumed to be in error.")
code = 400
title = 'Bad Request'
class NotImplemented(Error):
message_format = _("The action you have requested has not"
" been implemented.")
code = 501
title = 'Not Implemented'
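
The keyword arguments are interpolated into message_format; a minimal
sketch:

    from oslo_cache import exception

    try:
        raise exception.ValidationError(attribute='region object',
                                        target='configure_cache_region')
    except exception.ValidationError as exc:
        print(exc)  # 'Expecting to find region object in ...'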

View File

@@ -0,0 +1,15 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_cache import _opts
_opts.configure()
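
As a result, merely importing the package registers the [cache] option
group on the global CONF; a quick sketch:

    from oslo_config import cfg
    import oslo_cache  # noqa: import side effect registers the options

    assert cfg.CONF.cache.enabled is False        # global toggle default
    assert cfg.CONF.cache.expiration_time == 600  # default TTL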

View File

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2013 Metacloud
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@@ -12,17 +12,335 @@
# License for the specific language governing permissions and limitations
# under the License.
"""
test_cache
----------------------------------
Tests for `cache` module.
"""
import copy
import time
import uuid
from dogpile.cache import api
from dogpile.cache import proxy
import mock
from oslo_config import cfg
from oslotest import base
import six
import testtools
from oslo_cache import core as cache
from oslo_cache import exception
from oslo_config import fixture as config_fixture
class TestCache(base.BaseTestCase):
CONF = cfg.CONF
NO_VALUE = api.NO_VALUE
def test_something(self):
pass
class BaseTestCase(base.BaseTestCase):
def setUp(self):
super(BaseTestCase, self).setUp()
self.addCleanup(CONF.reset)
self.config_fixture = self.useFixture(config_fixture.Config(CONF))
self.addCleanup(delattr, self, 'config_fixture')
def _copy_value(value):
if value is not NO_VALUE:
value = copy.deepcopy(value)
return value
# NOTE(morganfainberg): WARNING - It is not recommended to use the Memory
# backend for dogpile.cache in a real deployment under any circumstances. The
# backend does no cleanup of expired values and therefore will leak memory. The
# backend is not implemented in a way to share data across processes (e.g.
# Keystone in HTTPD). This proxy is a hack to get around the lack of isolation
# of values in memory. Currently it blindly stores and retrieves the values
# from the cache, and modifications to dicts/lists/etc returned can result in
# changes to the cached values. In short, do not use the dogpile.cache.memory
# backend unless you are running tests or expecting odd/strange results.
class CacheIsolatingProxy(proxy.ProxyBackend):
"""Proxy that forces a memory copy of stored values.
The default in-memory cache-region does not perform a copy on values it
is meant to cache. Therefore if the value is modified after set or after
get, the cached value also is modified. This proxy does a copy as the last
thing before storing data.
"""
def get(self, key):
return _copy_value(self.proxied.get(key))
def set(self, key, value):
self.proxied.set(key, _copy_value(value))
class TestProxy(proxy.ProxyBackend):
def get(self, key):
value = _copy_value(self.proxied.get(key))
if value is not NO_VALUE:
if isinstance(value[0], TestProxyValue):
value[0].cached = True
return value
class TestProxyValue(object):
def __init__(self, value):
self.value = value
self.cached = False
class CacheRegionTest(BaseTestCase):
def setUp(self):
super(CacheRegionTest, self).setUp()
self.region = cache.make_region()
cache.configure_cache_region(self.region)
self.region.wrap(TestProxy)
self.test_value = TestProxyValue('Decorator Test')
def _add_test_caching_option(self):
self.config_fixture.register_opt(
cfg.BoolOpt('caching', default=True), group='cache')
def _get_cacheable_function(self):
with mock.patch.object(cache.REGION, 'cache_on_arguments',
self.region.cache_on_arguments):
memoize = cache.get_memoization_decorator(section='cache')
@memoize
def cacheable_function(value):
return value
return cacheable_function
# FIXME(dims) : Need to resurrect this test ported from keystone
# def test_region_built_with_proxy_direct_cache_test(self):
# # Verify cache regions are properly built with proxies.
# test_value = TestProxyValue('Direct Cache Test')
# self.region.set('cache_test', test_value)
# cached_value = self.region.get('cache_test')
# self.assertTrue(cached_value.cached)
def test_cache_region_no_error_multiple_config(self):
# Verify configuring the CacheRegion again doesn't error.
cache.configure_cache_region(self.region)
cache.configure_cache_region(self.region)
def _get_cache_fallthrough_fn(self, cache_time):
with mock.patch.object(cache.REGION, 'cache_on_arguments',
self.region.cache_on_arguments):
memoize = cache.get_memoization_decorator(
section='cache',
expiration_section='assignment')
class _test_obj(object):
def __init__(self, value):
self.test_value = value
@memoize
def get_test_value(self):
return self.test_value
def _do_test(value):
test_obj = _test_obj(value)
# Ensure the value has been cached
test_obj.get_test_value()
# Get the now cached value
cached_value = test_obj.get_test_value()
self.assertTrue(cached_value.cached)
self.assertEqual(value.value, cached_value.value)
self.assertEqual(cached_value.value, test_obj.test_value.value)
# Change the underlying value on the test object.
test_obj.test_value = TestProxyValue(uuid.uuid4().hex)
self.assertEqual(cached_value.value,
test_obj.get_test_value().value)
# override the system time to ensure the non-cached new value
# is returned
new_time = time.time() + (cache_time * 2)
with mock.patch.object(time, 'time',
return_value=new_time):
overridden_cache_value = test_obj.get_test_value()
self.assertNotEqual(cached_value.value,
overridden_cache_value.value)
self.assertEqual(test_obj.test_value.value,
overridden_cache_value.value)
return _do_test
# FIXME(dims) : Need to resurrect this test ported from keystone
# def test_cache_no_fallthrough_expiration_time_fn(self):
# # Since we do not re-configure the cache region, for ease of testing
# # this value is set the same as the expiration_time default in the
# # [cache] section
# cache_time = 600
# expiration_time = cache.get_expiration_time_fn('role')
# do_test = self._get_cache_fallthrough_fn(cache_time)
# # Run the test with the assignment cache_time value
# self.config_fixture.config(cache_time=cache_time,
# group='role')
# test_value = TestProxyValue(uuid.uuid4().hex)
# self.assertEqual(cache_time, expiration_time())
# do_test(value=test_value)
# FIXME(dims) : Need to resurrect this test ported from keystone
# def test_cache_fallthrough_expiration_time_fn(self):
# # Since we do not re-configure the cache region, for ease of testing
# # this value is set the same as the expiration_time default in the
# # [cache] section
# cache_time = 599
# expiration_time = cache.get_expiration_time_fn('role')
# do_test = self._get_cache_fallthrough_fn(cache_time)
# # Run the test with the assignment cache_time value set to None and
# # the global value set.
# self.config_fixture.config(cache_time=None, group='role')
# test_value = TestProxyValue(uuid.uuid4().hex)
# self.assertIsNone(expiration_time())
# do_test(value=test_value)
# FIXME(dims) : Need to resurrect this test ported from keystone
# def test_should_cache_fn_global_cache_enabled(self):
# # Verify should_cache_fn generates a sane function for subsystem and
# # functions as expected with caching globally enabled.
# cacheable_function = self._get_cacheable_function()
#
# self.config_fixture.config(group='cache', enabled=True)
# cacheable_function(self.test_value)
# cached_value = cacheable_function(self.test_value)
# self.assertTrue(cached_value.cached)
@testtools.skipIf(six.PY3, 'FIXME: this test does not work on Python 3.x')
def test_should_cache_fn_global_cache_disabled(self):
# Verify should_cache_fn generates a sane function for subsystem and
# functions as expected with caching globally disabled.
cacheable_function = self._get_cacheable_function()
self.config_fixture.config(group='cache', enabled=False)
cacheable_function(self.test_value)
cached_value = cacheable_function(self.test_value)
self.assertFalse(cached_value.cached)
@testtools.skipIf(six.PY3, 'FIXME: this test does not work on Python 3.x')
def test_should_cache_fn_global_cache_disabled_section_cache_enabled(self):
# Verify should_cache_fn generates a sane function for subsystem and
# functions as expected with caching globally disabled and the specific
# section caching enabled.
cacheable_function = self._get_cacheable_function()
self._add_test_caching_option()
self.config_fixture.config(group='cache', enabled=False)
self.config_fixture.config(group='cache', caching=True)
cacheable_function(self.test_value)
cached_value = cacheable_function(self.test_value)
self.assertFalse(cached_value.cached)
@testtools.skipIf(six.PY3, 'FIXME: this test does not work on Python 3.x')
def test_should_cache_fn_global_cache_enabled_section_cache_disabled(self):
# Verify should_cache_fn generates a sane function for subsystem and
# functions as expected with caching globally enabled and the specific
# section caching disabled.
cacheable_function = self._get_cacheable_function()
self._add_test_caching_option()
self.config_fixture.config(group='cache', enabled=True)
self.config_fixture.config(group='cache', caching=False)
cacheable_function(self.test_value)
cached_value = cacheable_function(self.test_value)
self.assertFalse(cached_value.cached)
# FIXME(dims) : Need to resurrect this test ported from keystone
# def test_should_cache_fn_global_cache_enabled_section_cache_enabled(
# self):
# #Verify should_cache_fn generates a sane function for subsystem and
# #functions as expected with caching globally enabled and the specific
# #section caching enabled.
# cacheable_function = self._get_cacheable_function()
#
# self._add_test_caching_option()
# self.config_fixture.config(group='cache', enabled=True)
# self.config_fixture.config(group='cache', caching=True)
#
# cacheable_function(self.test_value)
# cached_value = cacheable_function(self.test_value)
# self.assertTrue(cached_value.cached)
def test_cache_dictionary_config_builder(self):
"""Validate we build a sane dogpile.cache dictionary config."""
self.config_fixture.config(group='cache',
config_prefix='test_prefix',
backend='some_test_backend',
expiration_time=86400,
backend_argument=['arg1:test',
'arg2:test:test',
'arg3.invalid'])
config_dict = cache.build_cache_config()
self.assertEqual(
CONF.cache.backend, config_dict['test_prefix.backend'])
self.assertEqual(
CONF.cache.expiration_time,
config_dict['test_prefix.expiration_time'])
self.assertEqual('test', config_dict['test_prefix.arguments.arg1'])
self.assertEqual('test:test',
config_dict['test_prefix.arguments.arg2'])
self.assertNotIn('test_prefix.arguments.arg3', config_dict)
# FIXME(dims) : Need to resurrect this test ported from keystone
# def test_cache_debug_proxy(self):
# single_value = 'Test Value'
# single_key = 'testkey'
# multi_values = {'key1': 1, 'key2': 2, 'key3': 3}
#
# self.region.set(single_key, single_value)
# self.assertEqual(single_value, self.region.get(single_key))
#
# self.region.delete(single_key)
# self.assertEqual(NO_VALUE, self.region.get(single_key))
#
# self.region.set_multi(multi_values)
# cached_values = self.region.get_multi(multi_values.keys())
# for value in multi_values.values():
# self.assertIn(value, cached_values)
# self.assertEqual(len(multi_values.values()), len(cached_values))
#
# self.region.delete_multi(multi_values.keys())
# for value in self.region.get_multi(multi_values.keys()):
# self.assertEqual(NO_VALUE, value)
def test_configure_non_region_object_raises_error(self):
self.assertRaises(exception.ValidationError,
cache.configure_cache_region,
"bogus")
class CacheNoopBackendTest(BaseTestCase):
def setUp(self):
super(CacheNoopBackendTest, self).setUp()
self.region = cache.make_region()
cache.configure_cache_region(self.region)
def config_overrides(self):
super(CacheNoopBackendTest, self).config_overrides()
self.config_fixture.config(group='cache',
backend='oslo_cache.noop')
@testtools.skipIf(six.PY3, 'FIXME: this test does not work on Python 3.x')
def test_noop_backend(self):
single_value = 'Test Value'
single_key = 'testkey'
multi_values = {'key1': 1, 'key2': 2, 'key3': 3}
self.region.set(single_key, single_value)
self.assertEqual(NO_VALUE, self.region.get(single_key))
self.region.set_multi(multi_values)
cached_values = self.region.get_multi(multi_values.keys())
self.assertEqual(len(cached_values), len(multi_values.values()))
for value in cached_values:
self.assertEqual(NO_VALUE, value)
# Delete should not raise exceptions
self.region.delete(single_key)
self.region.delete_multi(multi_values.keys())

View File

@@ -22,10 +22,9 @@ from dogpile.cache import region as dp_region
import six
from six.moves import range
from keystone.common.cache.backends import mongo
from keystone import exception
from keystone.tests import unit as tests
from oslo_cache.backends import mongo
from oslo_cache import exception
from oslo_cache.tests import test_cache
# Mock database structure sample where 'ks_cache' is database and
# 'cache' is collection. Dogpile CachedValue data is divided in two
@@ -278,7 +277,7 @@ class MyTransformer(mongo.BaseTransform):
return super(MyTransformer, self).transform_outgoing(son, collection)
class MongoCache(tests.BaseTestCase):
class MongoCache(test_cache.BaseTestCase):
def setUp(self):
super(MongoCache, self).setUp()
global COLLECTIONS
@@ -299,33 +298,33 @@ class MongoCache(tests.BaseTestCase):
self.arguments.pop('db_hosts')
region = dp_region.make_region()
self.assertRaises(exception.ValidationError, region.configure,
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments)
def test_missing_db_name(self):
self.arguments.pop('db_name')
region = dp_region.make_region()
self.assertRaises(exception.ValidationError, region.configure,
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments)
def test_missing_cache_collection_name(self):
self.arguments.pop('cache_collection')
region = dp_region.make_region()
self.assertRaises(exception.ValidationError, region.configure,
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments)
def test_incorrect_write_concern(self):
self.arguments['w'] = 'one value'
region = dp_region.make_region()
self.assertRaises(exception.ValidationError, region.configure,
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments)
def test_correct_write_concern(self):
self.arguments['w'] = 1
region = dp_region.make_region().configure('keystone.cache.mongo',
region = dp_region.make_region().configure('oslo_cache.mongo',
arguments=self.arguments)
random_key = uuid.uuid4().hex
@@ -335,7 +334,7 @@ class MongoCache(tests.BaseTestCase):
def test_incorrect_read_preference(self):
self.arguments['read_preference'] = 'inValidValue'
region = dp_region.make_region().configure('keystone.cache.mongo',
region = dp_region.make_region().configure('oslo_cache.mongo',
arguments=self.arguments)
# As per delayed loading of pymongo, read_preference value should
# still be string and NOT enum
@@ -347,7 +346,7 @@ class MongoCache(tests.BaseTestCase):
def test_correct_read_preference(self):
self.arguments['read_preference'] = 'secondaryPreferred'
region = dp_region.make_region().configure('keystone.cache.mongo',
region = dp_region.make_region().configure('oslo_cache.mongo',
arguments=self.arguments)
# As per delayed loading of pymongo, read_preference value should
# still be string and NOT enum
@@ -365,13 +364,13 @@ class MongoCache(tests.BaseTestCase):
self.arguments['use_replica'] = True
region = dp_region.make_region()
self.assertRaises(exception.ValidationError, region.configure,
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments)
def test_provided_replica_set_name(self):
self.arguments['use_replica'] = True
self.arguments['replicaset_name'] = 'my_replica'
dp_region.make_region().configure('keystone.cache.mongo',
dp_region.make_region().configure('oslo_cache.mongo',
arguments=self.arguments)
self.assertTrue(True) # reached here means no initialization error
@@ -379,7 +378,7 @@ class MongoCache(tests.BaseTestCase):
self.arguments['mongo_ttl_seconds'] = 'sixty'
region = dp_region.make_region()
self.assertRaises(exception.ValidationError, region.configure,
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments)
def test_cache_configuration_values_assertion(self):
@@ -387,7 +386,7 @@ class MongoCache(tests.BaseTestCase):
self.arguments['replicaset_name'] = 'my_replica'
self.arguments['mongo_ttl_seconds'] = 60
self.arguments['ssl'] = False
region = dp_region.make_region().configure('keystone.cache.mongo',
region = dp_region.make_region().configure('oslo_cache.mongo',
arguments=self.arguments)
# There is no proxy so can access MongoCacheBackend directly
self.assertEqual('localhost:27017', region.backend.api.hosts)
@@ -404,7 +403,7 @@ class MongoCache(tests.BaseTestCase):
arguments1 = copy.copy(self.arguments)
arguments1['cache_collection'] = 'cache_region1'
region1 = dp_region.make_region().configure('keystone.cache.mongo',
region1 = dp_region.make_region().configure('oslo_cache.mongo',
arguments=arguments1)
# There is no proxy so can access MongoCacheBackend directly
self.assertEqual('localhost:27017', region1.backend.api.hosts)
@@ -428,7 +427,7 @@ class MongoCache(tests.BaseTestCase):
arguments2['cache_collection'] = 'cache_region2'
arguments2['son_manipulator'] = class_name
region2 = dp_region.make_region().configure('keystone.cache.mongo',
region2 = dp_region.make_region().configure('oslo_cache.mongo',
arguments=arguments2)
# There is no proxy so can access MongoCacheBackend directly
self.assertEqual('localhost:27017', region2.backend.api.hosts)
@@ -451,7 +450,7 @@ class MongoCache(tests.BaseTestCase):
def test_typical_configuration(self):
dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
self.assertTrue(True) # reached here means no initialization error
@@ -459,7 +458,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_get_missing_data(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
@@ -470,7 +469,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_set_data(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
@@ -481,7 +480,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_set_data_with_string_as_valid_ttl(self):
self.arguments['mongo_ttl_seconds'] = '3600'
region = dp_region.make_region().configure('keystone.cache.mongo',
region = dp_region.make_region().configure('oslo_cache.mongo',
arguments=self.arguments)
self.assertEqual(3600, region.backend.api.ttl_seconds)
random_key = uuid.uuid4().hex
@@ -491,7 +490,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_set_data_with_int_as_valid_ttl(self):
self.arguments['mongo_ttl_seconds'] = 1800
region = dp_region.make_region().configure('keystone.cache.mongo',
region = dp_region.make_region().configure('oslo_cache.mongo',
arguments=self.arguments)
self.assertEqual(1800, region.backend.api.ttl_seconds)
random_key = uuid.uuid4().hex
@@ -501,7 +500,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_set_none_as_data(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
@@ -512,7 +511,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_set_blank_as_data(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
@@ -523,7 +522,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_set_same_key_multiple_times(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
@@ -541,7 +540,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_multi_set_data(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
random_key = uuid.uuid4().hex
@@ -562,7 +561,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_multi_get_data(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
random_key = uuid.uuid4().hex
@@ -585,7 +584,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_multi_set_should_update_existing(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
random_key = uuid.uuid4().hex
@@ -613,7 +612,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_multi_set_get_with_blanks_none(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
random_key = uuid.uuid4().hex
@@ -654,7 +653,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_delete_data(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
@@ -669,7 +668,7 @@ class MongoCache(tests.BaseTestCase):
def test_backend_multi_delete_data(self):
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
random_key = uuid.uuid4().hex
@@ -705,7 +704,7 @@ class MongoCache(tests.BaseTestCase):
self.arguments['continue_on_error'] = True
self.arguments['secondary_acceptable_latency_ms'] = 60
region = dp_region.make_region().configure(
'keystone.cache.mongo',
'oslo_cache.mongo',
arguments=self.arguments
)
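
The tests above pin down the required argument names; a sketch of a
typical region wired to the renamed backend (host and collection names
are illustrative, and a reachable MongoDB is assumed):

    from dogpile.cache import region as dp_region
    from oslo_cache import core  # noqa: registers 'oslo_cache.mongo'

    region = dp_region.make_region().configure(
        'oslo_cache.mongo',
        arguments={
            'db_hosts': 'localhost:27017',
            'db_name': 'ks_cache',
            'cache_collection': 'cache',
        })
    region.set('some_key', 'some_value')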

View File

@@ -3,3 +3,8 @@
# process, which may cause wedges in the gate later.
Babel>=1.3
dogpile.cache>=0.5.3
six>=1.9.0
oslo.config>=1.11.0 # Apache-2.0
oslo.log>=1.2.0 # Apache-2.0
oslo.utils>=1.6.0 # Apache-2.0

View File

@@ -5,3 +5,7 @@ hacking<0.11,>=0.10.0
oslotest>=1.5.1 # Apache-2.0
oslosphinx>=2.5.0 # Apache-2.0
sphinx>=1.1.2,!=1.2.0,!=1.3b1,<1.3
testtools>=1.4.0
# Optional dogpile backend: MongoDB
pymongo>=3.0.2