A dogpile cache of gnocchi resources

What this does is store a key-value pair in oslo_cache where the
key is the resource id and the value is a hash of the frozenset of
the attributes of the resource, less the defined metrics[1]. When
it is time to create or update a resource we ask the cache:

  Are the resource attributes I'm about to store the same as the
  last ones stored for this id?

If the answer is yes we don't need to store the resource. That's
all it does, and that is all it needs to do: if the cache fails to
have the correct information, that is the same as the cache not
existing in the first place.
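
A rough sketch of that check, in Python (the helper name and
signature here are illustrative, not the dispatcher's actual API):

    import copy

    def attributes_changed(cache, resource_id, resource, action):
        # Key on resource id plus the action ('create' or 'update').
        cache_key = resource_id + action
        attrs = copy.deepcopy(resource)
        # Metrics are sub-dicts and not hashable, so leave them out [1].
        attrs.pop('metrics', None)
        attribute_hash = hash(frozenset(attrs.items()))
        if cache.get(cache_key) == attribute_hash:
            return None  # same attributes as last time: skip the write
        return cache_key, attribute_hash  # write, then cache this hash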

To get this to work in the face of eventlet's eager beavering we
need to lock around create_resource and update_resource so that
we have a chance to write the cache before another *_resource
call is made in this process. Superficial investigation shows that
this works out pretty well because when, for example, you start a
new instance the collector will all of a sudden try several
_create_resource calls, only one of which actually needs to happen.
The lock makes sure only that one happens when there is just one
collector. Where there are several collectors that won't always be
the case, but _some_ of the duplicate calls will still be stopped.
And that's the point here: better, not perfect.
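
Sketched out (reusing the illustrative helper above; the real
change holds a lock on the dispatcher instance, see the diff
below):

    import threading

    resource_lock = threading.Lock()

    def create_resource_once(gnocchi, cache, resource_type, resource):
        checked = attributes_changed(cache, resource['id'],
                                     resource, 'create')
        if checked is None:
            return  # cache hit: these attributes were already stored
        cache_key, attribute_hash = checked
        with resource_lock:
            # Only one thread at a time talks to gnocchi and then
            # records the hash, so later callers see the cache hit.
            gnocchi.create_resource(resource_type, resource)
            cache.set(cache_key, attribute_hash)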

The cache is implemented using oslo_cache, which can be configured
via oslo_config with an entry such as:

    [cache]
    backend = dogpile.cache.redis
    backend_argument = url:redis://localhost:6379
    backend_argument = db:0
    backend_argument = distributed_lock:True
    backend_argument = redis_expiration_time:600
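
In code the dispatcher only has to turn that configuration into a
cache region; roughly, using the public oslo_cache helpers (error
handling and the ImportError guard omitted):

    import oslo_cache
    from oslo_config import cfg

    conf = cfg.CONF  # assumed to be already populated by the service
    oslo_cache.configure(conf)  # registers the [cache] options
    if 'null' not in conf.cache.backend:  # skip the default noop backend
        region = oslo_cache.create_region()
        cache = oslo_cache.configure_cache_region(conf, region)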

The cache is exercised most for resource updates (as you might
expect) but does still sometimes get engaged for resource creates
(as described above).

A cache_key_mangler is used to ensure that keys generated by the
gnocchi dispatcher are in their own namespace.
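
That mangler is essentially a uuid5 hash of each key into a random
namespace chosen when the dispatcher module is loaded (the change
also encodes the key to bytes on Python 2):

    import uuid

    CACHE_NAMESPACE = uuid.uuid4()

    def cache_key_mangler(key):
        """Construct an opaque cache key."""
        return uuid.uuid5(CACHE_NAMESPACE, key).hex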

[1] Metrics are not included because they are represented as
sub-dicts, which are not hashable and thus cannot go in the
frozenset. Since the metrics are fairly static (coming from a yaml
file near you, soon) this shouldn't be a problem. If it is, we can
come up with a way to create a hash that can deal with sub-dicts.
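
A tiny illustration of the failure being avoided (attribute values
made up):

    attrs = {'display_name': 'vm1', 'host': 'compute1'}
    hash(frozenset(attrs.items()))  # fine: every value is hashable

    attrs['metrics'] = {'cpu_util': {'archive_policy_name': 'low'}}
    hash(frozenset(attrs.items()))  # TypeError: unhashable type: 'dict'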

Closes-Bug: #1483634
Change-Id: I1f2da145ca87712cd2ff5b8afecf1bca0ba53788
Chris Dent, 2015-11-09 16:31:45 +00:00
commit 2511cfb6e4 (parent d753c5de26)
2 changed files with 70 additions and 4 deletions

@@ -12,11 +12,13 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
+import copy
 import fnmatch
 import itertools
 import operator
 import os
 import threading
+import uuid
 
 from oslo_config import cfg
 from oslo_log import log
@@ -26,9 +28,10 @@ from stevedore import extension
 from ceilometer import declarative
 from ceilometer import dispatcher
 from ceilometer.dispatcher import gnocchi_client
-from ceilometer.i18n import _, _LE
+from ceilometer.i18n import _, _LE, _LW
 from ceilometer import keystone_client
 
+CACHE_NAMESPACE = uuid.uuid4()
 LOG = log.getLogger(__name__)
 
 dispatcher_opts = [
@@ -56,6 +59,13 @@ dispatcher_opts = [
 cfg.CONF.register_opts(dispatcher_opts, group="dispatcher_gnocchi")
 
 
+def cache_key_mangler(key):
+    """Construct an opaque cache key."""
+    if six.PY2:
+        key = key.encode('utf-8')
+    return uuid.uuid5(CACHE_NAMESPACE, key).hex
+
+
 def log_and_ignore_unexpected_workflow_error(func):
     def log_and_ignore(self, *args, **kwargs):
         try:
@@ -149,8 +159,27 @@ class GnocchiDispatcher(dispatcher.MeterDispatcherBase):
         self._ks_client = keystone_client.get_client()
         self.resources_definition = self._load_resources_definitions(conf)
 
+        self.cache = None
+        try:
+            import oslo_cache
+            oslo_cache.configure(self.conf)
+            # NOTE(cdent): The default cache backend is a real but
+            # noop backend. We don't want to use that here because
+            # we want to avoid the cache pathways entirely if the
+            # cache has not been configured explicitly.
+            if 'null' not in self.conf.cache.backend:
+                cache_region = oslo_cache.create_region()
+                self.cache = oslo_cache.configure_cache_region(
+                    self.conf, cache_region)
+                self.cache.key_mangler = cache_key_mangler
+        except ImportError:
+            pass
+        except oslo_cache.exception.ConfigurationError as exc:
+            LOG.warn(_LW('unable to configure oslo_cache: %s') % exc)
+
         self._gnocchi_project_id = None
         self._gnocchi_project_id_lock = threading.Lock()
+        self._gnocchi_resource_lock = threading.Lock()
 
         self._gnocchi = gnocchi_client.Client(conf.dispatcher_gnocchi.url)
@@ -279,13 +308,49 @@ class GnocchiDispatcher(dispatcher.MeterDispatcherBase):
                               metric_name=metric_name))
 
         if resource_extra:
-            self._gnocchi.update_resource(resource_type, resource_id,
-                                          resource_extra)
+            if self.cache:
+                cache_key, attribute_hash = self._check_resource_cache(
+                    resource['id'], resource, 'update')
+                if attribute_hash:
+                    with self._gnocchi_resource_lock:
+                        self._gnocchi.update_resource(resource_type,
+                                                      resource_id,
+                                                      resource_extra)
+                        self.cache.set(cache_key, attribute_hash)
+                else:
+                    LOG.debug('resource cache hit for update %s',
+                              cache_key)
+            else:
+                self._gnocchi.update_resource(resource_type, resource_id,
+                                              resource_extra)
+
+    def _check_resource_cache(self, resource_id, resource_data, action):
+        cache_key = resource_id + action
+        resource_info = copy.deepcopy(resource_data)
+        if 'metrics' in resource_info:
+            del resource_info['metrics']
+        attribute_hash = hash(frozenset(resource_info.items()))
+        cached_hash = self.cache.get(cache_key)
+        if cached_hash != attribute_hash:
+            return cache_key, attribute_hash
+        return cache_key, None
 
     def _ensure_resource_and_metric(self, resource_type, resource,
                                     metric_name):
         try:
-            self._gnocchi.create_resource(resource_type, resource)
+            if self.cache:
+                cache_key, attribute_hash = self._check_resource_cache(
+                    resource['id'], resource, 'create')
+                if attribute_hash:
+                    with self._gnocchi_resource_lock:
+                        self._gnocchi.create_resource(resource_type,
+                                                      resource)
+                        self.cache.set(cache_key, attribute_hash)
+                else:
+                    LOG.debug('resource cache hit for create %s',
+                              cache_key)
+            else:
+                self._gnocchi.create_resource(resource_type, resource)
         except gnocchi_client.ResourceAlreadyExists:
             try:
                 archive_policy = resource['metrics'][metric_name]

@@ -13,6 +13,7 @@ happybase!=0.7,>=0.5;python_version=='2.7'
 httplib2>=0.7.5
 mock>=1.2
 PyMySQL>=0.6.2 # MIT License
+oslo.cache>=0.8.0 # Apache-2.0
 # Docs Requirements
 oslosphinx>=2.5.0 # Apache-2.0
 reno>=0.1.1 # Apache2