Copy cache package from oslo-incubator

Related-Bug: #1276440
Change-Id: I7aaf8ae2eb909816d85092baa5c111f00e60d2c7
Jakub Libosvar 2014-02-05 10:07:02 +01:00
parent fa9098529f
commit 494a0f844d
7 changed files with 510 additions and 0 deletions

neutron/openstack/common/cache/_backends/memory.py

@@ -0,0 +1,165 @@
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections

from neutron.openstack.common.cache import backends
from neutron.openstack.common import lockutils
from neutron.openstack.common import timeutils


class MemoryBackend(backends.BaseCache):
    def __init__(self, parsed_url, options=None):
        super(MemoryBackend, self).__init__(parsed_url, options)
        self._clear()

    def _set_unlocked(self, key, value, ttl=0):
        expires_at = 0
        if ttl != 0:
            expires_at = timeutils.utcnow_ts() + ttl

        self._cache[key] = (expires_at, value)

        if expires_at:
            self._keys_expires[expires_at].add(key)

    def _set(self, key, value, ttl=0, not_exists=False):
        with lockutils.lock(key):
            # NOTE(flaper87): This is needed just in `set`
            # calls, hence it's not in `_set_unlocked`
            if not_exists and self._exists_unlocked(key):
                return False

            self._set_unlocked(key, value, ttl)
            return True

    def _get_unlocked(self, key, default=None):
        now = timeutils.utcnow_ts()

        try:
            timeout, value = self._cache[key]
        except KeyError:
            return (0, default)

        if timeout and now >= timeout:
            # NOTE(flaper87): Record expired,
            # remove it from the cache but catch
            # KeyError and ValueError in case
            # _purge_expired removed this key already.
            try:
                del self._cache[key]
            except KeyError:
                pass

            try:
                # NOTE(flaper87): Keys with ttl == 0
                # don't exist in the _keys_expires dict
                self._keys_expires[timeout].remove(key)
            except (KeyError, ValueError):
                pass

            return (0, default)

        return (timeout, value)

    def _get(self, key, default=None):
        with lockutils.lock(key):
            return self._get_unlocked(key, default)[1]

    def _exists_unlocked(self, key):
        now = timeutils.utcnow_ts()
        try:
            timeout = self._cache[key][0]
            return not timeout or now <= timeout
        except KeyError:
            return False

    def __contains__(self, key):
        with lockutils.lock(key):
            return self._exists_unlocked(key)

    def _incr_append(self, key, other):
        with lockutils.lock(key):
            timeout, value = self._get_unlocked(key)

            if value is None:
                return None

            ttl = timeutils.utcnow_ts() - timeout
            new_value = value + other
            self._set_unlocked(key, new_value, ttl)
            return new_value

    def _incr(self, key, delta):
        if not isinstance(delta, int):
            raise TypeError('delta must be an int instance')

        return self._incr_append(key, delta)

    def _append_tail(self, key, tail):
        return self._incr_append(key, tail)

    def _purge_expired(self):
        """Removes expired keys from the cache."""
        now = timeutils.utcnow_ts()
        for timeout in sorted(self._keys_expires.keys()):
            # NOTE(flaper87): If timeout is greater
            # than `now`, stop the iteration, remaining
            # keys have not expired.
            if now < timeout:
                break

            # NOTE(flaper87): Unset every key in
            # this set from the cache if its timeout
            # is equal to `timeout`. (The key might
            # have been updated)
            for subkey in self._keys_expires.pop(timeout):
                try:
                    if self._cache[subkey][0] == timeout:
                        del self._cache[subkey]
                except KeyError:
                    continue

    def __delitem__(self, key):
        self._purge_expired()

        # NOTE(flaper87): Delete the key. Using pop
        # since it could have been deleted already
        value = self._cache.pop(key, None)

        if value:
            try:
                # NOTE(flaper87): Keys with ttl == 0
                # don't exist in the _keys_expires dict
                self._keys_expires[value[0]].remove(value[1])
            except (KeyError, ValueError):
                pass

    def _clear(self):
        self._cache = {}
        self._keys_expires = collections.defaultdict(set)

    def _get_many(self, keys, default):
        return super(MemoryBackend, self)._get_many(keys, default)

    def _set_many(self, data, ttl=0):
        return super(MemoryBackend, self)._set_many(data, ttl)

    def _unset_many(self, keys):
        return super(MemoryBackend, self)._unset_many(keys)
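
The backend can be exercised directly once its oslo-incubator dependencies (`lockutils`, `timeutils`, `py3kcompat.urlutils`) are importable. The following is a minimal usage sketch, not part of the copied module, assuming the vendored path shown above:

# Illustrative sketch only; assumes the vendored modules above are
# importable. MemoryBackend expects an already parsed URL object.
from neutron.openstack.common.cache._backends import memory
from neutron.openstack.common.py3kcompat import urlutils

cache = memory.MemoryBackend(urlutils.urlparse('memory://'),
                             options={'default_ttl': 30})
cache.set('token', 'abc123', ttl=5)            # kept for roughly 5 seconds
assert cache.get('token') == 'abc123'
assert 'token' in cache                        # __contains__ checks expiry
del cache['token']                             # __delitem__ also purges expired keys
assert cache.get('token', 'gone') == 'gone'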

neutron/openstack/common/cache/backends.py

@@ -0,0 +1,263 @@
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc

import six

NOTSET = object()


@six.add_metaclass(abc.ABCMeta)
class BaseCache(object):
    """Base Cache Abstraction

    :params parsed_url: Parsed url object.
    :params options: A dictionary with configuration parameters
        for the cache. For example:

        - default_ttl: An integer defining the default ttl
          for keys.
    """

    def __init__(self, parsed_url, options=None):
        self._parsed_url = parsed_url
        self._options = options or {}
        self._default_ttl = int(self._options.get('default_ttl', 0))

    @abc.abstractmethod
    def _set(self, key, value, ttl, not_exists=False):
        """Implementations of this class have to override this method."""

    def set(self, key, value, ttl, not_exists=False):
        """Sets or updates a cache entry

        NOTE: Thread-safety is required and has to be
        guaranteed by the backend implementation.

        :params key: Item key as string.
        :type key: `unicode string`
        :params value: Value to assign to the key. This
                       can be anything that is handled
                       by the current backend.
        :params ttl: Key's timeout in seconds. 0 means
                     no timeout.
        :type ttl: int
        :params not_exists: If True, the key will be set
                            if it doesn't exist. Otherwise,
                            it'll always be set.
        :type not_exists: bool

        :returns: True if the operation succeeds, False otherwise.
        """
        if ttl is None:
            ttl = self._default_ttl

        return self._set(key, value, ttl, not_exists)

    def __setitem__(self, key, value):
        self.set(key, value, self._default_ttl)

    def setdefault(self, key, value):
        """Sets the key value to `value` if it doesn't exist

        :params key: Item key as string.
        :type key: `unicode string`
        :params value: Value to assign to the key. This
                       can be anything that is handled
                       by the current backend.
        """
        try:
            return self[key]
        except KeyError:
            self[key] = value
            return value

    @abc.abstractmethod
    def _get(self, key, default):
        """Implementations of this class have to override this method."""

    def get(self, key, default=None):
        """Gets one item from the cache

        NOTE: Thread-safety is required and it has to be
        guaranteed by the backend implementation.

        :params key: Key for the item to retrieve
                     from the cache.
        :params default: The default value to return.

        :returns: `key`'s value in the cache if it exists,
                  otherwise `default` should be returned.
        """
        return self._get(key, default)

    def __getitem__(self, key):
        value = self.get(key, NOTSET)

        if value is NOTSET:
            raise KeyError

        return value

    @abc.abstractmethod
    def __delitem__(self, key):
        """Removes an item from cache.

        NOTE: Thread-safety is required and it has to be
        guaranteed by the backend implementation.

        :params key: The key to remove.

        :returns: The key value if there's one
        """

    @abc.abstractmethod
    def _clear(self):
        """Implementations of this class have to override this method."""

    def clear(self):
        """Removes all items from the cache.

        NOTE: Thread-safety is required and it has to be
        guaranteed by the backend implementation.
        """
        return self._clear()

    @abc.abstractmethod
    def _incr(self, key, delta):
        """Implementations of this class have to override this method."""

    def incr(self, key, delta=1):
        """Increments the value for a key

        :params key: The key for the value to be incremented
        :params delta: Number of units by which to increment
                       the value. Pass a negative number to
                       decrement the value.

        :returns: The new value
        """
        return self._incr(key, delta)

    @abc.abstractmethod
    def _append_tail(self, key, tail):
        """Implementations of this class have to override this method."""

    def append_tail(self, key, tail):
        """Appends `tail` to `key`'s value.

        :params key: The key of the value to which
                     `tail` should be appended.
        :params tail: The list of values to append to the
                      original.

        :returns: The new value
        """
        if not hasattr(tail, "__iter__"):
            raise TypeError('Tail must be an iterable')

        if not isinstance(tail, list):
            # NOTE(flaper87): Make sure we pass a list
            # down to the implementation. Not all drivers
            # have support for generators, sets or other
            # iterables.
            tail = list(tail)

        return self._append_tail(key, tail)

    def append(self, key, value):
        """Appends `value` to `key`'s value.

        :params key: The key of the value to which
                     `tail` should be appended.
        :params value: The value to append to the
                       original.

        :returns: The new value
        """
        return self.append_tail(key, [value])

    @abc.abstractmethod
    def __contains__(self, key):
        """Verifies that a key exists.

        :params key: The key to verify.

        :returns: True if the key exists,
                  otherwise False.
        """

    @abc.abstractmethod
    def _get_many(self, keys, default):
        """Implementations of this class have to override this method."""
        return ((k, self.get(k, default=default)) for k in keys)

    def get_many(self, keys, default=NOTSET):
        """Gets keys' value from cache

        :params keys: List of keys to retrieve.
        :params default: The default value to return
                         for each key that is not in
                         the cache.

        :returns: A generator of (key, value)
        """
        return self._get_many(keys, default)

    @abc.abstractmethod
    def _set_many(self, data, ttl):
        """Implementations of this class have to override this method."""
        for key, value in data.items():
            self.set(key, value, ttl=ttl)

    def set_many(self, data, ttl=None):
        """Puts several items into the cache at once

        Depending on the backend, this operation may or may
        not be efficient. The default implementation calls
        set for each (key, value) pair passed, other backends
        support set_many operations as part of their protocols.

        :params data: A dictionary like {key: val} to store
                      in the cache.
        :params ttl: Key's timeout in seconds.
        """
        if ttl is None:
            ttl = self._default_ttl

        self._set_many(data, ttl)

    def update(self, **kwargs):
        """Sets several (key, value) pairs.

        Refer to the `set_many` docstring.
        """
        self.set_many(kwargs, ttl=self._default_ttl)

    @abc.abstractmethod
    def _unset_many(self, keys):
        """Implementations of this class have to override this method."""
        for key in keys:
            del self[key]

    def unset_many(self, keys):
        """Removes several keys from the cache at once

        :params keys: List of keys to unset.
        """
        self._unset_many(keys)
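
The abstract hooks above define the contract a concrete driver must satisfy. As a hypothetical illustration (not part of this change), a trivial, non-thread-safe dictionary backend could override them as follows; note that `_get_many`, `_set_many` and `_unset_many` are abstract as well, so a subclass must override them even if it only reuses the default loops:

# Hypothetical example driver; assumes BaseCache is imported from the
# module above. Ignores ttl and locking for brevity.
class DictBackend(BaseCache):
    def __init__(self, parsed_url, options=None):
        super(DictBackend, self).__init__(parsed_url, options)
        self._data = {}

    def _set(self, key, value, ttl, not_exists=False):
        if not_exists and key in self._data:
            return False
        self._data[key] = value
        return True

    def _get(self, key, default):
        return self._data.get(key, default)

    def __delitem__(self, key):
        self._data.pop(key, None)

    def _clear(self):
        self._data = {}

    def _incr(self, key, delta):
        self._data[key] = self._data.get(key, 0) + delta
        return self._data[key]

    def _append_tail(self, key, tail):
        self._data[key] = self._data.get(key, []) + tail
        return self._data[key]

    def __contains__(self, key):
        return key in self._data

    # Declared abstract above, so they must be overridden even when the
    # BaseCache default loops are good enough.
    def _get_many(self, keys, default):
        return super(DictBackend, self)._get_many(keys, default)

    def _set_many(self, data, ttl):
        return super(DictBackend, self)._set_many(data, ttl)

    def _unset_many(self, keys):
        return super(DictBackend, self)._unset_many(keys)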

neutron/openstack/common/cache/cache.py

@@ -0,0 +1,79 @@
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Cache library.
Supported configuration options:
`default_backend`: Name of the cache backend to use.
`key_namespace`: Namespace under which keys will be created.
"""
from stevedore import driver
from neutron.openstack.common.py3kcompat import urlutils
def _get_olso_configs():
"""Returns the oslo.config options to register."""
# NOTE(flaper87): Oslo config should be
# optional. Instead of doing try / except
# at the top of this file, lets import cfg
# here and assume that the caller of this
# function already took care of this dependency.
from oslo.config import cfg
return [
cfg.StrOpt('cache_url', default='memory://',
help='URL to connect to the cache back end.')
]
def register_oslo_configs(conf):
"""Registers a cache configuration options
:params conf: Config object.
:type conf: `cfg.ConfigOptions`
"""
conf.register_opts(_get_olso_configs())
def get_cache(url='memory://'):
"""Loads the cache backend
This function loads the cache backend
specified in the given configuration.
:param conf: Configuration instance to use
"""
parsed = urlutils.urlparse(url)
backend = parsed.scheme
query = parsed.query
# NOTE(flaper87): We need the following hack
# for python versions < 2.7.5. Previous versions
# of python parsed query params just for 'known'
# schemes. This was changed in this patch:
# http://hg.python.org/cpython/rev/79e6ff3d9afd
if not query and '?' in parsed.path:
query = parsed.path.split('?', 1)[-1]
parameters = urlutils.parse_qsl(query)
kwargs = {'options': dict(parameters)}
mgr = driver.DriverManager('neutron.openstack.common.cache.backends', backend,
invoke_on_load=True,
invoke_args=[parsed],
invoke_kwds=kwargs)
return mgr.driver
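
As a usage sketch (illustrative only, assuming the `memory` entry point added to setup.cfg later in this change is installed), the factory can be exercised like this; query-string parameters become the backend's options dict:

# Illustrative only; requires the 'memory' entry point from setup.cfg
# (added later in this commit) to be installed.
from neutron.openstack.common.cache import cache

backend = cache.get_cache('memory://?default_ttl=60')
backend['answer'] = 42               # stored with the 60 second default ttl
assert backend.get('answer') == 42
assert backend.incr('hits') is None  # incr on a missing key returns None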

openstack-common.conf

@@ -1,5 +1,6 @@
[DEFAULT]
# The list of modules to copy from oslo-incubator.git
module=cache
module=context
module=db
module=db.sqlalchemy

setup.cfg

@@ -161,6 +161,8 @@ neutron.ml2.mechanism_drivers =
    cisco_nexus = neutron.plugins.ml2.drivers.cisco.nexus.mech_cisco_nexus:CiscoNexusMechanismDriver
    l2population = neutron.plugins.ml2.drivers.l2pop.mech_driver:L2populationMechanismDriver
    bigswitch = neutron.plugins.ml2.drivers.mech_bigswitch.driver:BigSwitchMechanismDriver
neutron.openstack.common.cache.backends =
    memory = neutron.openstack.common.cache._backends.memory:MemoryBackend
[build_sphinx]
all_files = 1
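
End to end, a consumer would typically register the `cache_url` option and then build the backend from it. A minimal sketch, assuming oslo.config is available and neutron is installed so the entry point above resolves (hypothetical wiring, not part of this change):

# Hypothetical consumer-side wiring; not part of this change.
from oslo.config import cfg

from neutron.openstack.common.cache import cache

conf = cfg.ConfigOpts()
cache.register_oslo_configs(conf)            # registers the 'cache_url' StrOpt
conf([])                                     # parse CLI/config (none here)
backend = cache.get_cache(conf.cache_url)    # 'memory://' by default
backend.set('greeting', 'hello', ttl=0)      # ttl=0 means the key never expires
print(backend.get('greeting'))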