Switch to oslo.cache

The oslo-incubator cache module is about to be retired. We switch to
oslo.cache instead. The legacy memory backend is replaced by
oslo_cache.dict.

Closes-Bug: #1517883

Change-Id: I108242ca9f27c9ec47959ce7615bc7d84cae014b
Fang Zhen 2016-03-09 14:46:12 +08:00 committed by Ihar Hrachyshka
parent 96a195c064
commit 56efc8ac76
17 changed files with 331 additions and 659 deletions
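
For context, a hedged editorial sketch (not part of the commit) of the new-style
configuration path this change enables; the option values are illustrative and
would normally come from the [cache] group in neutron.conf:

from oslo_cache import core as cache
from oslo_config import cfg

conf = cfg.ConfigOpts()
cache.configure(conf)           # registers the [cache] option group
conf([])                        # no CLI arguments in this sketch
conf.set_override('enabled', True, group='cache')
conf.set_override('backend', 'oslo_cache.dict', group='cache')
conf.set_override('expiration_time', 5, group='cache')

region = cache.create_region()
cache.configure_cache_region(conf, region)
region.set('port-state', 'ACTIVE')
assert region.get('port-state') == 'ACTIVE'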

@@ -4,3 +4,4 @@ wrap_width = 79
namespace = neutron.metadata.agent
namespace = oslo.log
namespace = oslo.cache

@@ -30,12 +30,11 @@ from neutron._i18n import _, _LE, _LW
from neutron.agent.linux import utils as agent_utils
from neutron.agent.metadata import config
from neutron.agent import rpc as agent_rpc
from neutron.common import cache_utils as cache
from neutron.common import constants as n_const
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron.openstack.common.cache import cache
LOG = logging.getLogger(__name__)
@@ -76,10 +75,7 @@ class MetadataProxyHandler(object):
def __init__(self, conf):
self.conf = conf
if self.conf.cache_url:
self._cache = cache.get_cache(self.conf.cache_url)
else:
self._cache = False
self._cache = cache.get_cache(self.conf)
self.plugin_rpc = MetadataPluginAPI(topics.PLUGIN)
self.context = context.get_admin_context_without_session()
@@ -121,13 +117,13 @@ class MetadataProxyHandler(object):
return filters
@utils.cache_method_results
@cache.cache_method_results
def _get_router_networks(self, router_id):
"""Find all networks connected to given router."""
internal_ports = self._get_ports_from_server(router_id=router_id)
return tuple(p['network_id'] for p in internal_ports)
@utils.cache_method_results
@cache.cache_method_results
def _get_ports_for_remote_address(self, remote_address, networks):
"""Get list of ports that has given ip address and are part of
given networks.

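The import swap above does not change the memoization behavior of these
methods. A hedged editorial sketch (FakeHandler and its return value are
hypothetical; assumes a neutron tree with oslo.cache installed):

from oslo_cache import core
from neutron.common import cache_utils as cache

class FakeHandler(object):
    def __init__(self):
        # a real handler gets its region from cache_utils.get_cache()
        self._cache = core.create_region()
        self._cache.configure('oslo_cache.dict', expiration_time=5)
        self.rpc_calls = 0

    @cache.cache_method_results
    def _get_router_networks(self, router_id):
        self.rpc_calls += 1       # stands in for the RPC round trip
        return ('net-1', 'net-2')

h = FakeHandler()
assert h._get_router_networks('r1') == h._get_router_networks('r1')
assert h.rpc_calls == 1           # second call was served from cache
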
@@ -20,9 +20,9 @@ from oslo_log import log as logging
from neutron.agent.common import config as agent_conf
from neutron.agent.metadata import agent
from neutron.agent.metadata import config as metadata_conf
from neutron.common import cache_utils as cache
from neutron.common import config
from neutron.common import utils
from neutron.openstack.common.cache import cache
LOG = logging.getLogger(__name__)

@@ -0,0 +1,150 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from oslo_cache import core as cache
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import reflection
from six.moves.urllib import parse
from neutron._i18n import _
from neutron.common import utils
cache_opts = [
cfg.StrOpt('cache_url', default='',
deprecated_for_removal=True,
help=_('URL to connect to the cache back end. '
'This option is deprecated in the Newton release and '
'will be removed. Please add a [cache] group for '
'oslo.cache in your neutron.conf and add "enabled" and '
'"backend" options in this section.')),
]
LOG = logging.getLogger(__name__)
def register_oslo_configs(conf):
conf.register_opts(cache_opts)
cache.configure(conf)
def get_cache(conf):
"""Used to get cache client"""
# cache_url is still honored for backward compatibility. The memory
# backend was the only backend supported before, and default_ttl is its
# only option; the dict backend of oslo.cache replaces it.
if conf.cache_url:
return _get_cache_region_for_legacy(conf.cache_url)
elif conf.cache.enabled:
return _get_cache_region(conf)
else:
return False
def _get_cache_region(conf):
region = cache.create_region()
cache.configure_cache_region(conf, region)
return region
def _get_cache_region_for_legacy(url):
parsed = parse.urlparse(url)
backend = parsed.scheme
if backend == 'memory':
backend = 'oslo_cache.dict'
query = parsed.query
# NOTE(fangzhen): The following NOTE and code is from legacy
# oslo-incubator cache module. Previously reside in neutron at
# neutron/openstack/common/cache/cache.py:78
# NOTE(flaper87): We need the following hack
# for python versions < 2.7.5. Previous versions
# of python parsed query params just for 'known'
# schemes. This was changed in this patch:
# http://hg.python.org/cpython/rev/79e6ff3d9afd
if not query and '?' in parsed.path:
query = parsed.path.split('?', 1)[-1]
parameters = parse.parse_qs(query)
expiration_time = int(parameters.get('default_ttl', [0])[0])
region = cache.create_region()
region.configure(backend, expiration_time=expiration_time)
return region
else:
raise RuntimeError(_('Old-style configuration can use only the memory '
'(dict) backend'))
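
To make the legacy translation concrete, a small editorial sketch of what the
function extracts from a legacy URL (the pre-2.7.5 query-in-path quirk handled
above is ignored here):

from six.moves.urllib import parse

parsed = parse.urlparse('memory://?default_ttl=5')
assert parsed.scheme == 'memory'            # mapped to 'oslo_cache.dict'
query = parse.parse_qs(parsed.query)
assert int(query['default_ttl'][0]) == 5    # becomes expiration_time=5
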
class cache_method_results(object):
"""This decorator is intended for object methods only."""
def __init__(self, func):
self.func = func
functools.update_wrapper(self, func)
self._first_call = True
self._not_cached = cache.NO_VALUE
def _get_from_cache(self, target_self, *args, **kwargs):
target_self_cls_name = reflection.get_class_name(target_self,
fully_qualified=False)
func_name = "%(module)s.%(class)s.%(func_name)s" % {
'module': target_self.__module__,
'class': target_self_cls_name,
'func_name': self.func.__name__,
}
key = (func_name,) + args
if kwargs:
key += utils.dict2tuple(kwargs)
try:
item = target_self._cache.get(key)
except TypeError:
LOG.debug("Method %(func_name)s cannot be cached due to "
"unhashable parameters: args: %(args)s, kwargs: "
"%(kwargs)s",
{'func_name': func_name,
'args': args,
'kwargs': kwargs})
return self.func(target_self, *args, **kwargs)
if item is self._not_cached:
item = self.func(target_self, *args, **kwargs)
target_self._cache.set(key, item)
return item
def __call__(self, target_self, *args, **kwargs):
target_self_cls_name = reflection.get_class_name(target_self,
fully_qualified=False)
if not hasattr(target_self, '_cache'):
raise NotImplementedError(
_("Instance of class %(module)s.%(class)s must contain _cache "
"attribute") % {
'module': target_self.__module__,
'class': target_self_cls_name})
if not target_self._cache:
if self._first_call:
LOG.debug("Instance of class %(module)s.%(class)s doesn't "
"contain attribute _cache therefore results "
"cannot be cached for %(func_name)s.",
{'module': target_self.__module__,
'class': target_self_cls_name,
'func_name': self.func.__name__})
self._first_call = False
return self.func(target_self, *args, **kwargs)
return self._get_from_cache(target_self, *args, **kwargs)
def __get__(self, obj, objtype):
return functools.partial(self.__call__, obj)
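
A hedged usage sketch of the decorator (the class and its fetch helper are
hypothetical; get_cache and cache_method_results are the module-level names
defined above): the host object must expose a `_cache` attribute holding
either a configured region or False, exactly as get_cache() returns.

class RouterLookup(object):
    def __init__(self, conf):
        # a region when caching is configured, False otherwise
        self._cache = get_cache(conf)

    @cache_method_results
    def get_networks(self, router_id):
        return self._fetch(router_id)   # hypothetical expensive call

    def _fetch(self, router_id):
        return ('net-1', 'net-2')

With `_cache` set to False the method runs uncached and logs once; without
the attribute at all, the decorator raises NotImplementedError.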

@@ -42,7 +42,6 @@ from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import reflection
import six
from stevedore import driver
@@ -58,68 +57,6 @@ MAX_UINT16 = 0xffff
synchronized = lockutils.synchronized_with_prefix(SYNCHRONIZED_PREFIX)
class cache_method_results(object):
"""This decorator is intended for object methods only."""
def __init__(self, func):
self.func = func
functools.update_wrapper(self, func)
self._first_call = True
self._not_cached = object()
def _get_from_cache(self, target_self, *args, **kwargs):
target_self_cls_name = reflection.get_class_name(target_self,
fully_qualified=False)
func_name = "%(module)s.%(class)s.%(func_name)s" % {
'module': target_self.__module__,
'class': target_self_cls_name,
'func_name': self.func.__name__,
}
key = (func_name,) + args
if kwargs:
key += dict2tuple(kwargs)
try:
item = target_self._cache.get(key, self._not_cached)
except TypeError:
LOG.debug("Method %(func_name)s cannot be cached due to "
"unhashable parameters: args: %(args)s, kwargs: "
"%(kwargs)s",
{'func_name': func_name,
'args': args,
'kwargs': kwargs})
return self.func(target_self, *args, **kwargs)
if item is self._not_cached:
item = self.func(target_self, *args, **kwargs)
target_self._cache.set(key, item, None)
return item
def __call__(self, target_self, *args, **kwargs):
target_self_cls_name = reflection.get_class_name(target_self,
fully_qualified=False)
if not hasattr(target_self, '_cache'):
raise NotImplementedError(
_("Instance of class %(module)s.%(class)s must contain _cache "
"attribute") % {
'module': target_self.__module__,
'class': target_self_cls_name})
if not target_self._cache:
if self._first_call:
LOG.debug("Instance of class %(module)s.%(class)s doesn't "
"contain attribute _cache therefore results "
"cannot be cached for %(func_name)s.",
{'module': target_self.__module__,
'class': target_self_cls_name,
'func_name': self.func.__name__})
self._first_call = False
return self.func(target_self, *args, **kwargs)
return self._get_from_cache(target_self, *args, **kwargs)
def __get__(self, obj, objtype):
return functools.partial(self.__call__, obj)
def ensure_dir(dir_path):
"""Ensure a directory with 755 permissions mode."""
try:

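The block removed here is the incubator-era twin of the new decorator in
cache_utils; the behavioral difference is the cache API each targets. A short
editorial sketch of the semantics the replacement relies on (assumes
oslo.cache is installed):

from oslo_cache import core as cache

region = cache.create_region()
region.configure('oslo_cache.dict', expiration_time=5)
# oslo.cache: get() signals a miss with NO_VALUE instead of taking a
# sentinel default, and set() has no per-call ttl argument, which is
# why the old get(key, self._not_cached) / set(key, item, None) calls
# became get(key) / set(key, item).
assert region.get('missing') is cache.NO_VALUE
region.set('missing', 42)
assert region.get('missing') == 42
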
@@ -1,166 +0,0 @@
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from oslo_concurrency import lockutils
from oslo_utils import timeutils
from neutron.openstack.common.cache import backends
class MemoryBackend(backends.BaseCache):
def __init__(self, parsed_url, options=None):
super(MemoryBackend, self).__init__(parsed_url, options)
self._clear()
def _set_unlocked(self, key, value, ttl=0):
expires_at = 0
if ttl != 0:
expires_at = timeutils.utcnow_ts() + ttl
self._cache[key] = (expires_at, value)
if expires_at:
self._keys_expires[expires_at].add(key)
def _set(self, key, value, ttl=0, not_exists=False):
with lockutils.lock(key):
# NOTE(flaper87): This is needed just in `set`
# calls, hence it's not in `_set_unlocked`
if not_exists and self._exists_unlocked(key):
return False
self._set_unlocked(key, value, ttl)
return True
def _get_unlocked(self, key, default=None):
now = timeutils.utcnow_ts()
try:
timeout, value = self._cache[key]
except KeyError:
return (0, default)
if timeout and now >= timeout:
# NOTE(flaper87): Record expired,
# remove it from the cache but catch
# KeyError and ValueError in case
# _purge_expired removed this key already.
try:
del self._cache[key]
except KeyError:
pass
try:
# NOTE(flaper87): Keys with ttl == 0
# don't exist in the _keys_expires dict
self._keys_expires[timeout].remove(key)
except (KeyError, ValueError):
pass
return (0, default)
return (timeout, value)
def _get(self, key, default=None):
with lockutils.lock(key):
return self._get_unlocked(key, default)[1]
def _exists_unlocked(self, key):
now = timeutils.utcnow_ts()
try:
timeout = self._cache[key][0]
return not timeout or now <= timeout
except KeyError:
return False
def __contains__(self, key):
with lockutils.lock(key):
return self._exists_unlocked(key)
def _incr_append(self, key, other):
with lockutils.lock(key):
timeout, value = self._get_unlocked(key)
if value is None:
return None
ttl = timeutils.utcnow_ts() - timeout
new_value = value + other
self._set_unlocked(key, new_value, ttl)
return new_value
def _incr(self, key, delta):
if not isinstance(delta, int):
raise TypeError('delta must be an int instance')
return self._incr_append(key, delta)
def _append_tail(self, key, tail):
return self._incr_append(key, tail)
def _purge_expired(self):
"""Removes expired keys from the cache."""
now = timeutils.utcnow_ts()
for timeout in sorted(self._keys_expires.keys()):
# NOTE(flaper87): If timeout is greater
# than `now`, stop the iteration, remaining
# keys have not expired.
if now < timeout:
break
# NOTE(flaper87): Unset every key in
# this set from the cache if its timeout
# is equal to `timeout`. (The key might
# have been updated)
for subkey in self._keys_expires.pop(timeout):
try:
if self._cache[subkey][0] == timeout:
del self._cache[subkey]
except KeyError:
continue
def __delitem__(self, key):
self._purge_expired()
# NOTE(flaper87): Delete the key. Using pop
# since it could have been deleted already
value = self._cache.pop(key, None)
if value:
try:
# NOTE(flaper87): Keys with ttl == 0
# don't exist in the _keys_expires dict
self._keys_expires[value[0]].remove(key)
except (KeyError, ValueError):
pass
def _clear(self):
self._cache = {}
self._keys_expires = collections.defaultdict(set)
def _get_many(self, keys, default):
return super(MemoryBackend, self)._get_many(keys, default)
def _set_many(self, data, ttl=0):
return super(MemoryBackend, self)._set_many(data, ttl)
def _unset_many(self, keys):
return super(MemoryBackend, self)._unset_many(keys)

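For readers skimming the deleted backend, its heart is the expiry
bookkeeping: values are stored as (expires_at, value) and a second map groups
keys by expiry timestamp so purging can walk buckets in order. A toy editorial
distillation, not part of the tree:

import collections
import time

cache_map = {}                               # key -> (expires_at, value)
keys_expires = collections.defaultdict(set)  # expires_at -> set of keys

def set_key(key, value, ttl=0):
    expires_at = int(time.time()) + ttl if ttl else 0
    cache_map[key] = (expires_at, value)
    if expires_at:
        keys_expires[expires_at].add(key)

def purge_expired(now):
    for timeout in sorted(keys_expires):
        if now < timeout:
            break                 # later buckets have not expired yet
        for key in keys_expires.pop(timeout):
            # the key may have been re-set with a newer timeout
            if cache_map.get(key, (None, None))[0] == timeout:
                del cache_map[key]
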
@@ -1,250 +0,0 @@
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
NOTSET = object()
@six.add_metaclass(abc.ABCMeta)
class BaseCache(object):
"""Base Cache Abstraction
:params parsed_url: Parsed url object.
:params options: A dictionary with configuration parameters
for the cache. For example:
- default_ttl: An integer defining the default ttl for keys.
"""
def __init__(self, parsed_url, options=None):
self._parsed_url = parsed_url
self._options = options or {}
self._default_ttl = int(self._options.get('default_ttl', 0))
@abc.abstractmethod
def _set(self, key, value, ttl, not_exists=False):
"""Implementations of this class have to override this method."""
def set(self, key, value, ttl, not_exists=False):
"""Sets or updates a cache entry
.. note:: Thread-safety is required and has to be guaranteed by the
backend implementation.
:params key: Item key as string.
:type key: `unicode string`
:params value: Value to assign to the key. This can be anything that
is handled by the current backend.
:params ttl: Key's timeout in seconds. 0 means no timeout.
:type ttl: int
:params not_exists: If True, the key will be set if it doesn't exist.
Otherwise, it'll always be set.
:type not_exists: bool
:returns: True if the operation succeeds, False otherwise.
"""
if ttl is None:
ttl = self._default_ttl
return self._set(key, value, ttl, not_exists)
def __setitem__(self, key, value):
self.set(key, value, self._default_ttl)
def setdefault(self, key, value):
"""Sets the key value to `value` if it doesn't exist
:params key: Item key as string.
:type key: `unicode string`
:params value: Value to assign to the key. This can be anything that
is handled by the current backend.
"""
try:
return self[key]
except KeyError:
self[key] = value
return value
@abc.abstractmethod
def _get(self, key, default):
"""Implementations of this class have to override this method."""
def get(self, key, default=None):
"""Gets one item from the cache
.. note:: Thread-safety is required and it has to be guaranteed
by the backend implementation.
:params key: Key for the item to retrieve from the cache.
:params default: The default value to return.
:returns: `key`'s value in the cache if it exists, otherwise
`default` should be returned.
"""
return self._get(key, default)
def __getitem__(self, key):
value = self.get(key, NOTSET)
if value is NOTSET:
raise KeyError
return value
@abc.abstractmethod
def __delitem__(self, key):
"""Removes an item from cache.
.. note:: Thread-safety is required and it has to be guaranteed by
the backend implementation.
:params key: The key to remove.
:returns: The key value if there's one
"""
@abc.abstractmethod
def _clear(self):
"""Implementations of this class have to override this method."""
def clear(self):
"""Removes all items from the cache.
.. note:: Thread-safety is required and it has to be guaranteed by
the backend implementation.
"""
return self._clear()
@abc.abstractmethod
def _incr(self, key, delta):
"""Implementations of this class have to override this method."""
def incr(self, key, delta=1):
"""Increments the value for a key
:params key: The key for the value to be incremented
:params delta: Number of units by which to increment the value.
Pass a negative number to decrement the value.
:returns: The new value
"""
return self._incr(key, delta)
@abc.abstractmethod
def _append_tail(self, key, tail):
"""Implementations of this class have to override this method."""
def append_tail(self, key, tail):
"""Appends `tail` to `key`'s value.
:params key: The key of the value to which `tail` should be appended.
:params tail: The list of values to append to the original.
:returns: The new value
"""
if not hasattr(tail, "__iter__"):
raise TypeError('Tail must be an iterable')
if not isinstance(tail, list):
# NOTE(flaper87): Make sure we pass a list
# down to the implementation. Not all drivers
# have support for generators, sets or other
# iterables.
tail = list(tail)
return self._append_tail(key, tail)
def append(self, key, value):
"""Appends `value` to `key`'s value.
:params key: The key of the value to which `tail` should be appended.
:params value: The value to append to the original.
:returns: The new value
"""
return self.append_tail(key, [value])
@abc.abstractmethod
def __contains__(self, key):
"""Verifies that a key exists.
:params key: The key to verify.
:returns: True if the key exists, otherwise False.
"""
@abc.abstractmethod
def _get_many(self, keys, default):
"""Implementations of this class have to override this method."""
return ((k, self.get(k, default=default)) for k in keys)
def get_many(self, keys, default=NOTSET):
"""Gets keys' value from cache
:params keys: List of keys to retrieve.
:params default: The default value to return for each key that is not
in the cache.
:returns: A generator of (key, value)
"""
return self._get_many(keys, default)
@abc.abstractmethod
def _set_many(self, data, ttl):
"""Implementations of this class have to override this method."""
for key, value in data.items():
self.set(key, value, ttl=ttl)
def set_many(self, data, ttl=None):
"""Puts several items into the cache at once
Depending on the backend, this operation may or may not be efficient.
The default implementation calls set for each (key, value) pair
passed, other backends support set_many operations as part of their
protocols.
:params data: A dictionary like {key: val} to store in the cache.
:params ttl: Key's timeout in seconds.
"""
if ttl is None:
ttl = self._default_ttl
self._set_many(data, ttl)
def update(self, **kwargs):
"""Sets several (key, value) paris.
Refer to the `set_many` docstring.
"""
self.set_many(kwargs, ttl=self._default_ttl)
@abc.abstractmethod
def _unset_many(self, keys):
"""Implementations of this class have to override this method."""
for key in keys:
del self[key]
def unset_many(self, keys):
"""Removes several keys from the cache at once
:params keys: List of keys to unset.
"""
self._unset_many(keys)

@@ -1,92 +0,0 @@
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Cache library.
Supported configuration options:
`default_backend`: Name of the cache backend to use.
`key_namespace`: Namespace under which keys will be created.
"""
########################################################################
#
# THIS MODULE IS DEPRECATED
#
# Please refer to
# https://etherpad.openstack.org/p/kilo-neutron-library-proposals for
# the discussion leading to this deprecation.
#
# We recommend helping with the new oslo.cache library being created
# as a wrapper for dogpile.
#
########################################################################
from six.moves.urllib import parse
from stevedore import driver
def _get_oslo_configs():
"""Returns the oslo config options to register."""
# NOTE(flaper87): Oslo config should be
# optional. Instead of doing try / except
# at the top of this file, lets import cfg
# here and assume that the caller of this
# function already took care of this dependency.
from oslo_config import cfg
return [
cfg.StrOpt('cache_url', default='memory://',
help='URL to connect to the cache back end.')
]
def register_oslo_configs(conf):
"""Registers a cache configuration options
:params conf: Config object.
:type conf: `cfg.ConfigOptions`
"""
conf.register_opts(_get_oslo_configs())
def get_cache(url='memory://'):
"""Loads the cache backend
This function loads the cache backend
specified in the given configuration.
:param conf: Configuration instance to use
"""
parsed = parse.urlparse(url)
backend = parsed.scheme
query = parsed.query
# NOTE(flaper87): We need the following hack
# for python versions < 2.7.5. Previous versions
# of python parsed query params just for 'known'
# schemes. This was changed in this patch:
# http://hg.python.org/cpython/rev/79e6ff3d9afd
if not query and '?' in parsed.path:
query = parsed.path.split('?', 1)[-1]
parameters = parse.parse_qsl(query)
kwargs = {'options': dict(parameters)}
mgr = driver.DriverManager('neutron.openstack.common.cache.backends', backend,
invoke_on_load=True,
invoke_args=[parsed],
invoke_kwds=kwargs)
return mgr.driver

@@ -29,6 +29,7 @@ import neutron.agent.linux.ra
import neutron.agent.metadata.config
import neutron.agent.ovsdb.api
import neutron.agent.securitygroups_rpc
import neutron.common.cache_utils
import neutron.conf.quota
import neutron.conf.service
import neutron.db.agents_db
@@ -43,7 +44,6 @@ import neutron.db.migration.cli
import neutron.extensions.allowedaddresspairs
import neutron.extensions.l3
import neutron.extensions.securitygroup
import neutron.openstack.common.cache.cache
import neutron.plugins.ml2.config
import neutron.plugins.ml2.drivers.agent.config
import neutron.plugins.ml2.drivers.linuxbridge.agent.common.config
@@ -218,7 +218,7 @@ def list_metadata_agent_opts():
neutron.agent.metadata.config.SHARED_OPTS,
neutron.agent.metadata.config.METADATA_PROXY_HANDLER_OPTS,
neutron.agent.metadata.config.UNIX_DOMAIN_METADATA_PROXY_OPTS,
neutron.openstack.common.cache.cache._get_oslo_configs())
neutron.common.cache_utils.cache_opts)
),
('AGENT', neutron.agent.common.config.AGENT_STATE_OPTS)
]

@@ -17,35 +17,47 @@ from neutron_lib import constants as n_const
import testtools
import webob
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from neutron.agent.linux import utils as agent_utils
from neutron.agent.metadata import agent
from neutron.agent.metadata import config
from neutron.agent import metadata_agent
from neutron.common import cache_utils as cache
from neutron.common import utils
from neutron.tests import base
class FakeConf(object):
auth_ca_cert = None
nova_metadata_ip = '9.9.9.9'
nova_metadata_port = 8775
metadata_proxy_shared_secret = 'secret'
nova_metadata_protocol = 'http'
nova_metadata_insecure = True
nova_client_cert = 'nova_cert'
nova_client_priv_key = 'nova_priv_key'
cache_url = ''
class ConfFixture(config_fixture.Config):
def setUp(self):
super(ConfFixture, self).setUp()
self.conf.register_opts(config.METADATA_PROXY_HANDLER_OPTS)
self.config(auth_ca_cert=None,
nova_metadata_ip='9.9.9.9',
nova_metadata_port=8775,
metadata_proxy_shared_secret='secret',
nova_metadata_protocol='http',
nova_metadata_insecure=True,
nova_client_cert='nova_cert',
nova_client_priv_key='nova_priv_key')
cache.register_oslo_configs(self.conf)
self.config(cache_url='')
class FakeConfCache(FakeConf):
cache_url = 'memory://?default_ttl=5'
class CacheConfFixture(ConfFixture):
def setUp(self):
super(CacheConfFixture, self).setUp()
self.config(cache_url='memory://?default_ttl=5')
class TestMetadataProxyHandlerBase(base.BaseTestCase):
fake_conf = FakeConf
fake_conf = cfg.CONF
fake_conf_fixture = ConfFixture(fake_conf)
def setUp(self):
super(TestMetadataProxyHandlerBase, self).setUp()
self.useFixture(self.fake_conf_fixture)
self.log_p = mock.patch.object(agent, 'LOG')
self.log = self.log_p.start()
self.handler = agent.MetadataProxyHandler(self.fake_conf)
@@ -85,7 +97,8 @@ class TestMetadataProxyHandlerRpc(TestMetadataProxyHandlerBase):
class TestMetadataProxyHandlerCache(TestMetadataProxyHandlerBase):
fake_conf = FakeConfCache
fake_conf = cfg.CONF
fake_conf_fixture = CacheConfFixture(fake_conf)
def test_call(self):
req = mock.Mock()
@@ -337,10 +350,10 @@ class TestMetadataProxyHandlerCache(TestMetadataProxyHandlerBase):
ca_certs=None, disable_ssl_certificate_validation=True)
mock_http.assert_has_calls([
mock.call().add_certificate(
FakeConf.nova_client_priv_key,
FakeConf.nova_client_cert,
"%s:%s" % (FakeConf.nova_metadata_ip,
FakeConf.nova_metadata_port)
self.fake_conf.nova_client_priv_key,
self.fake_conf.nova_client_cert,
"%s:%s" % (self.fake_conf.nova_metadata_ip,
self.fake_conf.nova_metadata_port)
),
mock.call().request(
'http://9.9.9.9:8775/the_path',
@@ -399,7 +412,8 @@ class TestMetadataProxyHandlerCache(TestMetadataProxyHandlerBase):
class TestMetadataProxyHandlerNoCache(TestMetadataProxyHandlerCache):
fake_conf = FakeConf
fake_conf = cfg.CONF
fake_conf_fixture = ConfFixture(fake_conf)
def test_get_router_networks_twice(self):
self._test_get_router_networks_twice_helper()

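The fixture pattern adopted above replaces the ad-hoc FakeConf classes. A
minimal editorial sketch of how oslo.config's Config fixture applies and rolls
back overrides (the option name mirrors the one used here):

from oslo_config import cfg
from oslo_config import fixture as config_fixture

conf = cfg.ConfigOpts()
conf.register_opts([cfg.StrOpt('cache_url', default='')])
conf([])                          # initialize without CLI arguments
fix = config_fixture.Config(conf)
fix.setUp()                       # in a test case: self.useFixture(fix)
fix.config(cache_url='memory://?default_ttl=5')
assert conf.cache_url == 'memory://?default_ttl=5'
fix.cleanUp()                     # overrides are rolled back here
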
@@ -0,0 +1,130 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from neutron.common import cache_utils as cache
from neutron.tests import base
class CacheConfFixture(config_fixture.Config):
def setUp(self):
super(CacheConfFixture, self).setUp()
cache.register_oslo_configs(self.conf)
self.config(enabled=True, group='cache')
class TestOsloCache(base.BaseTestCase):
def setUp(self):
super(TestOsloCache, self).setUp()
self.memory_conf = cfg.ConfigOpts()
memory_conf_fixture = CacheConfFixture(self.memory_conf)
self.useFixture(memory_conf_fixture)
memory_conf_fixture.config(cache_url='memory://?default_ttl=5')
self.dict_conf = cfg.ConfigOpts()
dict_conf_fixture = CacheConfFixture(self.dict_conf)
self.useFixture(dict_conf_fixture)
dict_conf_fixture.config(expiration_time=60,
backend='oslo_cache.dict', group='cache')
self.null_cache_conf = cfg.ConfigOpts()
null_conf_fixture = CacheConfFixture(self.null_cache_conf)
self.useFixture(null_conf_fixture)
null_conf_fixture.config(expiration_time=600,
backend='dogpile.cache.null', group='cache')
def _test_get_cache_region_helper(self, conf):
region = cache._get_cache_region(conf)
self.assertIsNotNone(region)
def test_get_cache_region(self):
self._test_get_cache_region_helper(self.dict_conf)
self._test_get_cache_region_helper(self.null_cache_conf)
def test_get_cache_region_for_legacy(self):
region = cache._get_cache_region_for_legacy("memory://?default_ttl=10")
self.assertIsNotNone(region)
self.assertEqual(10, region.expiration_time)
@mock.patch('neutron.common.cache_utils._get_cache_region_for_legacy')
@mock.patch('neutron.common.cache_utils._get_cache_region')
def test_get_cache(self, mock_get_cache_region,
mock_get_cache_region_for_legacy):
self.assertIsNotNone(cache.get_cache(self.memory_conf))
self.assertIsNotNone(cache.get_cache(self.dict_conf))
self.assertIsNotNone(cache.get_cache(self.null_cache_conf))
mock_get_cache_region.assert_has_calls(
[mock.call(self.dict_conf),
mock.call(self.null_cache_conf)]
)
mock_get_cache_region_for_legacy.assert_has_calls(
[mock.call('memory://?default_ttl=5')]
)
class _CachingDecorator(object):
def __init__(self):
self.func_retval = 'bar'
self._cache = mock.Mock()
@cache.cache_method_results
def func(self, *args, **kwargs):
return self.func_retval
class TestCachingDecorator(base.BaseTestCase):
def setUp(self):
super(TestCachingDecorator, self).setUp()
self.decor = _CachingDecorator()
self.func_name = '%(module)s._CachingDecorator.func' % {
'module': self.__module__
}
self.not_cached = self.decor.func.func.__self__._not_cached
def test_cache_miss(self):
expected_key = (self.func_name, 1, 2, ('foo', 'bar'))
args = (1, 2)
kwargs = {'foo': 'bar'}
self.decor._cache.get.return_value = self.not_cached
retval = self.decor.func(*args, **kwargs)
self.decor._cache.set.assert_called_once_with(
expected_key, self.decor.func_retval)
self.assertEqual(self.decor.func_retval, retval)
def test_cache_hit(self):
expected_key = (self.func_name, 1, 2, ('foo', 'bar'))
args = (1, 2)
kwargs = {'foo': 'bar'}
retval = self.decor.func(*args, **kwargs)
self.assertFalse(self.decor._cache.set.called)
self.assertEqual(self.decor._cache.get.return_value, retval)
self.decor._cache.get.assert_called_once_with(expected_key)
def test_get_unhashable(self):
expected_key = (self.func_name, [1], 2)
self.decor._cache.get.side_effect = TypeError
retval = self.decor.func([1], 2)
self.assertFalse(self.decor._cache.set.called)
self.assertEqual(self.decor.func_retval, retval)
self.decor._cache.get.assert_called_once_with(expected_key)
def test_missing_cache(self):
delattr(self.decor, '_cache')
self.assertRaises(NotImplementedError, self.decor.func, (1, 2))
def test_no_cache(self):
self.decor._cache = False
retval = self.decor.func((1, 2))
self.assertEqual(self.decor.func_retval, retval)

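One non-obvious line above is `self.decor.func.func.__self__._not_cached`. It
works because the decorator's __get__ hands back a functools.partial over the
bound __call__; a tiny editorial demonstration with a hypothetical class:

import functools

class Deco(object):
    def __call__(self, target_self, *args):
        return args

d = Deco()
bound = functools.partial(d.__call__, 'instance')
# .func is the bound __call__ method; its __self__ is the decorator
# instance, where attributes such as _not_cached live.
assert bound.func.__self__ is d
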
@@ -414,64 +414,6 @@ class TestDictUtils(base.BaseTestCase):
self.assertEqual(removed, [dict(key3="value3")])
class _CachingDecorator(object):
def __init__(self):
self.func_retval = 'bar'
self._cache = mock.Mock()
@utils.cache_method_results
def func(self, *args, **kwargs):
return self.func_retval
class TestCachingDecorator(base.BaseTestCase):
def setUp(self):
super(TestCachingDecorator, self).setUp()
self.decor = _CachingDecorator()
self.func_name = '%(module)s._CachingDecorator.func' % {
'module': self.__module__
}
self.not_cached = self.decor.func.func.__self__._not_cached
def test_cache_miss(self):
expected_key = (self.func_name, 1, 2, ('foo', 'bar'))
args = (1, 2)
kwargs = {'foo': 'bar'}
self.decor._cache.get.return_value = self.not_cached
retval = self.decor.func(*args, **kwargs)
self.decor._cache.set.assert_called_once_with(
expected_key, self.decor.func_retval, None)
self.assertEqual(self.decor.func_retval, retval)
def test_cache_hit(self):
expected_key = (self.func_name, 1, 2, ('foo', 'bar'))
args = (1, 2)
kwargs = {'foo': 'bar'}
retval = self.decor.func(*args, **kwargs)
self.assertFalse(self.decor._cache.set.called)
self.assertEqual(self.decor._cache.get.return_value, retval)
self.decor._cache.get.assert_called_once_with(expected_key,
self.not_cached)
def test_get_unhashable(self):
expected_key = (self.func_name, [1], 2)
self.decor._cache.get.side_effect = TypeError
retval = self.decor.func([1], 2)
self.assertFalse(self.decor._cache.set.called)
self.assertEqual(self.decor.func_retval, retval)
self.decor._cache.get.assert_called_once_with(expected_key,
self.not_cached)
def test_missing_cache(self):
delattr(self.decor, '_cache')
self.assertRaises(NotImplementedError, self.decor.func, (1, 2))
def test_no_cache(self):
self.decor._cache = False
retval = self.decor.func((1, 2))
self.assertEqual(self.decor.func_retval, retval)
class TestDict2Tuples(base.BaseTestCase):
def test_dict(self):
input_dict = {'foo': 'bar', '42': 'baz', 'aaa': 'zzz'}

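For reference, dict2tuple (used by the removed decorator here and by the new
one via utils.dict2tuple) folds kwargs into a hashable, order-independent key
component; a rough editorial equivalent:

def dict2tuple(d):
    items = list(d.items())
    items.sort()
    return tuple(items)

assert dict2tuple({'foo': 'bar', '42': 'baz'}) == (('42', 'baz'), ('foo', 'bar'))
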
@@ -0,0 +1,9 @@
---
features:
- Neutron switched to the oslo.cache library to cache port state in the
metadata agent. With it, more caching backends are available, including
Memcached and MongoDB. See the oslo.cache documentation for details.
deprecations:
- The cache_url configuration option is deprecated as of Newton and will be
removed in Ocata. Please configure the metadata cache using the [cache]
group, setting enabled = True and choosing a backend.

@@ -27,6 +27,7 @@ keystoneauth1>=2.1.0 # Apache-2.0
alembic>=0.8.4 # MIT
six>=1.9.0 # MIT
stevedore>=1.10.0 # Apache-2.0
oslo.cache>=1.5.0 # Apache-2.0
oslo.concurrency>=3.8.0 # Apache-2.0
oslo.config>=3.9.0 # Apache-2.0
oslo.context>=2.2.0 # Apache-2.0