Implement Barbican cache for quick secret payload/ref data
This patchset implements caching lookup and reverse-lookup functions to allow for much faster retrieval of encrypted data from Barbican, which doesn't currently support batched requests in its Secrets API. This behavior is necessary since Deckhand potentially has to retrieve and store up to dozens of secrets per request. Note that the data for both lookup functions is invalidated together, as both are tied to the same cache.

This change adds caching around arguably the most expensive operation in Deckhand: encryption. By caching the encryption step itself, document rendering performance improves meaningfully without requiring much additional logic.

A follow-up patch set, https://review.openstack.org/#/c/585842, focuses on caching the rendered documents themselves.

Change-Id: I0d330690a3c5e899b763ddcaa00d356007aa23fb
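To make the mechanism concrete: the caching is built on beaker's in-memory cache, where a value is computed at most once per key via a createfunc callback and then served from memory until it expires or the cache is cleared. Below is a minimal, self-contained sketch of that pattern; all names are illustrative and assume nothing beyond the beaker library itself (they are not part of this change).

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# Illustrative in-memory cache; the new module below builds equivalent
# objects from Deckhand's [barbican] configuration options.
manager = CacheManager(**parse_cache_config_options({'cache.type': 'memory'}))
demo_cache = manager.get_cache('demo_cache', expire=3600)

calls = []


def expensive_lookup():
    # Stands in for a round trip to Barbican; runs only on a cache miss.
    calls.append(1)
    return 'secret-payload'


# The first get() misses and invokes the callback; the second is a pure hit.
assert demo_cache.get(key='ref-1', createfunc=expensive_lookup) == 'secret-payload'
assert demo_cache.get(key='ref-1', createfunc=expensive_lookup) == 'secret-payload'
assert len(calls) == 1

# clear() drops every key at once, which is why the forward and reverse
# lookups described above are invalidated together.
demo_cache.clear()

Keying one shared cache by secret reference in one direction and by payload in the other is what yields the lookup/reverse-lookup pair described above, and clearing that single cache invalidates both directions at once.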
parent 6dfd43d207
commit 3af01d63d5
deckhand/barbican/cache.py | 80 (new file)
@@ -0,0 +1,80 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from oslo_log import log as logging

from deckhand.conf import config

CONF = config.CONF
LOG = logging.getLogger(__name__)

_CACHE_OPTS = {
    'cache.type': 'memory',
    'expire': CONF.barbican.cache_timeout,
}
_CACHE = CacheManager(**parse_cache_config_options(_CACHE_OPTS))
_BARBICAN_CACHE = _CACHE.get_cache('barbican_cache')


# NOTE(felipemonteiro): The functions below realize a lookup and
# reverse-lookup to allow for much faster retrieval of encrypted data from
# Barbican, which doesn't currently support batched requests in its Secrets
# API. This behavior is necessary since Deckhand has to potentially retrieve
# and store up to dozens of secrets per request. Note that data for both
# lookup functions below are invalidated together, as they are tied to the
# same cache.

def lookup_by_ref(barbicanclient, secret_ref):
    """Look up secret object using secret reference.

    Allows for quick lookup of secret payloads using ``secret_ref`` via
    caching.
    """
    def do_lookup():
        """Returns secret object stored in Barbican."""
        return barbicanclient.call("secrets.get", secret_ref)

    if CONF.barbican.enable_cache:
        return _BARBICAN_CACHE.get(key=secret_ref, createfunc=do_lookup)
    else:
        return do_lookup()


def lookup_by_payload(barbicanclient, **kwargs):
    """Look up secret reference using the secret payload.

    Allows for quick lookup of secret references using ``secret_payload`` via
    caching (essentially a reverse-lookup).

    Useful for ensuring that documents with the same secret payload (which
    occurs when the same document is recreated across different revisions)
    persist the same secret reference in the database -- and thus quicker
    future ``lookup_by_ref`` lookups.
    """
    def do_lookup():
        """Returns secret Barbican reference."""
        secret = barbicanclient.call("secrets.create", **kwargs)
        return secret.store()

    secret_payload = kwargs['payload']

    if CONF.barbican.enable_cache:
        return _BARBICAN_CACHE.get(key=secret_payload, createfunc=do_lookup)
    else:
        return do_lookup()


def invalidate():
    _BARBICAN_CACHE.clear()
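As an illustration of how the new module is meant to be driven (the driver changes below wire it in for real), here is a hedged usage sketch that stands in a mocked client, in the same style as the unit tests added at the end of this change; the client object, reference URL, and payload value are made up for the example.

import mock

from deckhand.barbican import cache

# Stand-in for Deckhand's Barbican client wrapper; a real caller passes the
# wrapper used by BarbicanDriver below.
client = mock.Mock()
client.call.side_effect = lambda action, *args, **kwargs: (
    mock.Mock(**{'store.return_value': 'https://barbican.example.org:9311/v1/secrets/abc'})
    if action == "secrets.create"
    else mock.Mock(payload='not-a-real-secret'))

# Reverse-lookup: payload -> secret reference. Repeating the call with the
# same payload returns the cached reference instead of creating a new secret,
# which is how identical documents across revisions keep a single reference.
ref_first = cache.lookup_by_payload(client, payload='not-a-real-secret')
ref_again = cache.lookup_by_payload(client, payload='not-a-real-secret')
assert ref_first == ref_again

# Forward lookup: secret reference -> secret object.
secret = cache.lookup_by_ref(client, ref_first)
assert secret.payload == 'not-a-real-secret'

# Both directions share one cache, so a single call clears them together.
cache.invalidate()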
deckhand/barbican/driver.py
@@ -20,6 +20,7 @@ from oslo_serialization import base64
 from oslo_utils import excutils
 import six
 
+from deckhand.barbican import cache
 from deckhand.barbican import client_wrapper
 from deckhand import errors
 from deckhand import types
@@ -145,8 +146,7 @@ class BarbicanDriver(object):
         LOG.info('Storing encrypted document data in Barbican.')
 
         try:
-            secret = self.barbicanclient.call("secrets.create", **kwargs)
-            secret_ref = secret.store()
+            secret_ref = cache.lookup_by_payload(self.barbicanclient, **kwargs)
         except (barbicanclient.exceptions.HTTPAuthError,
                 barbicanclient.exceptions.HTTPClientError) as e:
             LOG.exception(str(e))
@@ -180,7 +180,7 @@ class BarbicanDriver(object):
     def get_secret(self, secret_ref, src_doc):
         """Get a secret."""
         try:
-            secret = self.barbicanclient.call("secrets.get", secret_ref)
+            secret = cache.lookup_by_ref(self.barbicanclient, secret_ref)
         except (barbicanclient.exceptions.HTTPAuthError,
                 barbicanclient.exceptions.HTTPClientError) as e:
             LOG.exception(str(e))
@@ -204,6 +204,9 @@ class BarbicanDriver(object):
     def delete_secret(self, secret_ref):
         """Delete a secret."""
         try:
+            # NOTE(felipemonteiro): No cache invalidation is performed here
+            # as the only API that invokes this method is DELETE /revisions
+            # which also invalidates the entire Barbican cache.
             return self.barbicanclient.call("secrets.delete", secret_ref)
         except (barbicanclient.exceptions.HTTPAuthError,
                 barbicanclient.exceptions.HTTPServerError) as e:
deckhand/conf/config.py
@@ -26,8 +26,8 @@ barbican_group = cfg.OptGroup(
 
 
 barbican_opts = [
-    # TODO(fmontei): Drop these options and related group once Keystone
-    # endpoint lookup is used instead.
+    # TODO(felipemonteiro): Drop this option once Keystone endpoint lookup is
+    # implemented.
     cfg.StrOpt(
         'api_endpoint',
         sample_default='http://barbican.example.org:9311/',
@@ -35,7 +35,17 @@ barbican_opts = [
     cfg.IntOpt(
         'max_workers', default=10,
         help='Maximum number of threads used to call secret storage service '
-             'concurrently.')
+             'concurrently.'),
+    # TODO(felipemonteiro): This is better off being removed because the same
+    # effect can be achieved through per-test gabbi fixtures that clean up
+    # the cache between tests.
+    cfg.BoolOpt('enable_cache', default=True,
+                help="Whether to enable Barbican secret caching. Useful "
+                     "for testing to avoid cross-test caching conflicts."),
+    cfg.StrOpt(
+        'cache_timeout', default=3600,
+        help="How long (in seconds) Barbican secret reference/payload lookup "
+             "results should remain cached in memory.")
 ]
 
 
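For reference, an operator exercising these options would end up with something like the following in deckhand.conf; the values shown simply restate the defaults above rather than recommending new ones.

[barbican]
enable_cache = true
cache_timeout = 3600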
@@ -16,6 +16,7 @@ import functools
 
 import falcon
 
+from deckhand.barbican import cache as barbican_cache
 from deckhand.engine import cache as engine_cache
 
 
@@ -125,4 +126,5 @@ def sanitize_params(allowed_params):
 
 def invalidate_cache_data():
     """Invalidate all data associated with document rendering."""
+    barbican_cache.invalidate()
     engine_cache.invalidate()
|
@ -8,6 +8,7 @@ development_mode = false
|
|||||||
policy_file = policy.yaml
|
policy_file = policy.yaml
|
||||||
|
|
||||||
[barbican]
|
[barbican]
|
||||||
|
enable_cache = false
|
||||||
|
|
||||||
[database]
|
[database]
|
||||||
connection = ${AIRSHIP_DECKHAND_DATABASE_URL}
|
connection = ${AIRSHIP_DECKHAND_DATABASE_URL}
|
||||||
|
deckhand/tests/unit/barbican/__init__.py | 0 (new file)

deckhand/tests/unit/barbican/test_cache.py | 120 (new file)
@@ -0,0 +1,120 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
import testtools

from deckhand.barbican import cache
from deckhand.tests import test_utils
from deckhand.tests.unit import base as test_base


class BarbicanCacheTest(test_base.DeckhandTestCase):

    def setUp(self):
        super(BarbicanCacheTest, self).setUp()
        self.secret_ref = test_utils.rand_barbican_ref()
        self.secret_payload = 'very-secret-payload'
        # Clear the cache between tests.
        cache.invalidate()

    def _mock_barbicanclient(self):
        def call_barbican(action, *args, **kwargs):
            if action == "secrets.create":
                return mock.Mock(**{'store.return_value': self.secret_ref})
            elif action == "secrets.get":
                return mock.Mock(payload=self.secret_payload)

        mock_barbicanclient = mock.Mock()
        mock_barbicanclient.call.side_effect = call_barbican

        return mock_barbicanclient

    @property
    def barbicanclient(self):
        return self._mock_barbicanclient()

    def test_lookup_by_ref_cache(self):
        """Validate that ``lookup_by_ref`` caching works.

        Passing in None in lieu of an actual Barbican client (or mock object)
        proves that:

        * if the secret reference is in the cache, no error is raised because
          the cache is hit and no further processing is performed (otherwise
          a method would be called on `None`);
        * if the secret reference is not in the cache, then, following the
          logic above, a method is called on `None`, raising AttributeError.
        """
        # Validate that caching the ref returns expected payload.
        secret = cache.lookup_by_ref(self.barbicanclient, self.secret_ref)
        self.assertEqual(self.secret_payload, secret.payload)

        # Validate that the cache actually works.
        next_secret = cache.lookup_by_ref(None, self.secret_ref)
        self.assertEqual(secret.payload, next_secret.payload)

        # Validate that the reverse cache works.
        kwargs = {'payload': secret.payload}
        secret_ref = cache.lookup_by_payload(self.barbicanclient, **kwargs)
        self.assertEqual(self.secret_ref, secret_ref)

        # Different ref isn't in cache - expect AttributeError.
        with testtools.ExpectedException(AttributeError):
            cache.lookup_by_ref(None, secret_ref='uh-oh')

        # Invalidate the cache and ensure the original data isn't there.
        cache.invalidate()

        # The cache won't be hit this time - expect AttributeError.
        with testtools.ExpectedException(AttributeError):
            cache.lookup_by_ref(None, self.secret_ref)

    def test_lookup_by_payload_cache(self):
        """Validate that ``lookup_by_payload`` caching works.

        Passing in None in lieu of an actual Barbican client (or mock object)
        proves that:

        * if the payload is in the cache, no error is raised because the
          cache is hit and no further processing is performed (otherwise a
          method would be called on `None`);
        * if the payload is not in the cache, then, following the logic
          above, a method is called on `None`, raising AttributeError.
        """
        # Validate that caching the payload returns expected ref.
        kwargs = {'payload': self.secret_payload}
        secret_ref = cache.lookup_by_payload(self.barbicanclient, **kwargs)
        self.assertEqual(self.secret_ref, secret_ref)

        # Validate that the cache actually works.
        next_secret_ref = cache.lookup_by_payload(None, **kwargs)
        self.assertEqual(secret_ref, next_secret_ref)

        # Validate that the reverse cache works.
        secret = cache.lookup_by_ref(self.barbicanclient, secret_ref)
        self.assertEqual(self.secret_payload, secret.payload)

        # Different payload isn't in cache - expect AttributeError.
        with testtools.ExpectedException(AttributeError):
            cache.lookup_by_payload(None, payload='uh-oh')

        # Invalidate the cache and ensure the original data isn't there.
        cache.invalidate()

        # The cache won't be hit this time - expect AttributeError.
        with testtools.ExpectedException(AttributeError):
            cache.lookup_by_payload(None, **kwargs)