Rework caching

Replace caching solution:
- cache on the proxy layer (API communication)
- support caching all GET calls
- add possibility to bypass cache (important for _wait_for_ operations)

Cherry-Picked-From: https://review.opendev.org/c/openstack/openstacksdk/+/805851

Change-Id: I2c8ae2c59d15c750ea8ebd3031ffdd2ced2421ed
This commit is contained in:
Artem Goncharov 2021-08-24 17:31:14 +02:00 committed by Artem Goncharov
parent 975cabbdd8
commit 46c6ed8a8d
25 changed files with 322 additions and 96 deletions

View File

@ -223,11 +223,32 @@ Different cloud behaviors are also differently expensive to deal with. If you
want to get really crazy and tweak stuff, you can specify different expiration
times on a per-resource basis by passing values, in seconds to an expiration
mapping keyed on the singular name of the resource. A value of `-1` indicates
that the resource should never expire.
that the resource should never expire. Not specifying a value (same as
specifying `0`) indicates that no caching for this resource should be done.
`openstacksdk` only caches `GET` request responses for the queries which have a
non-zero expiration time defined. The caching key contains the URL and the
request parameters, therefore no collisions are expected.
`openstacksdk` does not actually cache anything itself, but it collects
and presents the cache information so that your various applications that
are connecting to OpenStack can share a cache should you desire.
The expiration time key is constructed (joined with `.`) in the same way as the
metrics are emitted:
* service type
* meaningful resource url segments (i.e. `/servers` results in `servers`,
`/servers/ID` results in `server`, `/servers/ID/metadata/KEY` results in
`server.metadata`)
Non-`GET` requests cause cache invalidation based on the caching key prefix, so
that e.g. a `PUT` request to `/images/ID` will invalidate all images cache (list
and all individual entries). Moreover it is possible to explicitly pass
`_sdk_skip_cache` parameter to the `proxy._get` function to bypass cache and
invalidate what is already there. This is happening automatically in the
`wait_for_status` methods, where it is expected that the resource is going to
change some of its attributes over time. Forcing complete cache invalidation
can be achieved by calling `conn._cache.invalidate`.
`openstacksdk` does not actually cache anything itself, but it collects and
presents the cache information so that your various applications that are
connecting to OpenStack can share a cache should you desire.
.. code-block:: yaml
@ -240,6 +261,9 @@ are connecting to OpenStack can share a cache should you desire.
expiration:
server: 5
flavor: -1
compute.servers: 5
compute.flavors: -1
image.images: 5
clouds:
mtvexx:
profile: vexxhost

View File

@ -102,26 +102,15 @@ class _OpenStackCloudMixin:
cache_class = self.config.get_cache_class()
cache_arguments = self.config.get_cache_arguments()
self._resource_caches = {}
self._cache_expirations = dict()
if cache_class != 'dogpile.cache.null':
self.cache_enabled = True
self._cache = self._make_cache(
cache_class, cache_expiration_time, cache_arguments)
expirations = self.config.get_cache_expirations()
for expire_key in expirations.keys():
# Only build caches for things we have list operations for
if getattr(
self, 'list_{0}'.format(expire_key), None):
self._resource_caches[expire_key] = self._make_cache(
cache_class, expirations[expire_key], cache_arguments)
self._SERVER_AGE = DEFAULT_SERVER_AGE
self._PORT_AGE = DEFAULT_PORT_AGE
self._FLOAT_AGE = DEFAULT_FLOAT_AGE
else:
self.cache_enabled = False
# TODO(gtema): delete it with the standalone cloud layer caching
def _fake_invalidate(unused):
pass
@ -148,15 +137,20 @@ class _OpenStackCloudMixin:
new_func.invalidate = _fake_invalidate
setattr(self, method, new_func)
# If server expiration time is set explicitly, use that. Otherwise
# fall back to whatever it was before
self._SERVER_AGE = self.config.get_cache_resource_expiration(
'server', self._SERVER_AGE)
self._PORT_AGE = self.config.get_cache_resource_expiration(
'port', self._PORT_AGE)
self._FLOAT_AGE = self.config.get_cache_resource_expiration(
'floating_ip', self._FLOAT_AGE)
# Unconditionally create cache even with a "null" backend
self._cache = self._make_cache(
cache_class, cache_expiration_time, cache_arguments)
expirations = self.config.get_cache_expirations()
for expire_key in expirations.keys():
self._cache_expirations[expire_key] = \
expirations[expire_key]
# TODO(gtema): delete in next change
self._SERVER_AGE = 0
self._PORT_AGE = 0
self._FLOAT_AGE = 0
self._api_cache_keys = set()
self._container_cache = dict()
self._file_hash_cache = dict()

View File

@ -91,7 +91,7 @@ class Limits(resource.Resource):
rate = resource.Body("rate", type=list, list_type=RateLimit)
def fetch(self, session, requires_id=False, error_message=None,
base_path=None, **params):
base_path=None, skip_cache=False, **params):
"""Get the Limits resource.
:param session: The session to use for making this request.
@ -106,5 +106,5 @@ class Limits(resource.Resource):
session=session, requires_id=requires_id,
error_message=error_message,
base_path=base_path,
**params
)
skip_cache=skip_cache,
**params)

View File

@ -78,7 +78,7 @@ class Secret(resource.Resource):
payload_content_encoding = resource.Body('payload_content_encoding')
def fetch(self, session, requires_id=True,
base_path=None, error_message=None):
base_path=None, error_message=None, skip_cache=False):
request = self._prepare_request(requires_id=requires_id,
base_path=base_path)
@ -93,8 +93,10 @@ class Secret(resource.Resource):
# Only try to get the payload if a content type has been explicitly
# specified or if one was found in the metadata response
if content_type is not None:
payload = session.get(utils.urljoin(request.url, "payload"),
headers={"Accept": content_type})
payload = session.get(
utils.urljoin(request.url, "payload"),
headers={"Accept": content_type},
skip_cache=skip_cache)
response["payload"] = payload.text
# We already have the JSON here so don't call into _translate_response

View File

@ -82,7 +82,7 @@ class Claim(resource.Resource):
return self
def fetch(self, session, requires_id=True,
base_path=None, error_message=None):
base_path=None, error_message=None, skip_cache=False):
request = self._prepare_request(requires_id=requires_id,
base_path=base_path)
headers = {
@ -91,8 +91,8 @@ class Claim(resource.Resource):
}
request.headers.update(headers)
response = session.get(request.url,
headers=request.headers)
response = session.get(
request.url, headers=request.headers, skip_cache=skip_cache)
self._translate_response(response)
return self

View File

@ -112,7 +112,7 @@ class Message(resource.Resource):
query_params["marker"] = new_marker
def fetch(self, session, requires_id=True,
base_path=None, error_message=None):
base_path=None, error_message=None, skip_cache=False):
request = self._prepare_request(requires_id=requires_id,
base_path=base_path)
headers = {
@ -122,7 +122,8 @@ class Message(resource.Resource):
request.headers.update(headers)
response = session.get(request.url,
headers=headers)
headers=headers,
skip_cache=skip_cache)
self._translate_response(response)
return self

View File

@ -111,7 +111,7 @@ class Queue(resource.Resource):
query_params["marker"] = new_marker
def fetch(self, session, requires_id=True,
base_path=None, error_message=None):
base_path=None, error_message=None, skip_cache=False):
request = self._prepare_request(requires_id=requires_id,
base_path=base_path)
headers = {
@ -119,8 +119,8 @@ class Queue(resource.Resource):
"X-PROJECT-ID": self.project_id or session.get_project_id()
}
request.headers.update(headers)
response = session.get(request.url,
headers=headers)
response = session.get(
request.url, headers=headers, skip_cache=skip_cache)
self._translate_response(response)
return self

View File

@ -119,7 +119,7 @@ class Subscription(resource.Resource):
query_params["marker"] = new_marker
def fetch(self, session, requires_id=True,
base_path=None, error_message=None):
base_path=None, error_message=None, skip_cache=False):
request = self._prepare_request(requires_id=requires_id,
base_path=base_path)
headers = {
@ -128,8 +128,8 @@ class Subscription(resource.Resource):
}
request.headers.update(headers)
response = session.get(request.url,
headers=request.headers)
response = session.get(
request.url, headers=request.headers, skip_cache=skip_cache)
self._translate_response(response)
return self

View File

@ -33,8 +33,10 @@ class Info(resource.Resource):
staticweb = resource.Body("staticweb", type=dict)
tempurl = resource.Body("tempurl", type=dict)
def fetch(self, session, requires_id=False,
base_path=None, error_message=None):
def fetch(
self, session, requires_id=False,
base_path=None, skip_cache=False, error_message=None
):
"""Get a remote resource based on this instance.
:param session: The session to use for making this request.

View File

@ -168,7 +168,8 @@ class Stack(resource.Resource):
return resp.json()
def fetch(self, session, requires_id=True,
base_path=None, error_message=None, resolve_outputs=True):
base_path=None, error_message=None,
skip_cache=False, resolve_outputs=True):
if not self.allow_fetch:
raise exceptions.MethodNotSupported(self, "fetch")
@ -183,7 +184,8 @@ class Stack(resource.Resource):
# apply parameters again, what results in them being set doubled
if not resolve_outputs:
request.url = request.url + '?resolve_outputs=False'
response = session.get(request.url, microversion=microversion)
response = session.get(
request.url, microversion=microversion, skip_cache=skip_cache)
kwargs = {}
if error_message:
kwargs['error_message'] = error_message

View File

@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import functools
import urllib
from urllib.parse import urlparse
@ -84,21 +85,79 @@ class Proxy(adapter.Adapter):
log_name = 'openstack'
self.log = _log.setup_logging(log_name)
def _get_cache_key_prefix(self, url):
"""Calculate cache prefix for the url"""
name_parts = self._extract_name(
url, self.service_type,
self.session.get_project_id())
return '.'.join(
[self.service_type]
+ name_parts)
def _invalidate_cache(self, conn, key_prefix):
"""Invalidate all cache entries starting with given prefix"""
for k in set(conn._api_cache_keys):
if k.startswith(key_prefix):
conn._cache.delete(k)
conn._api_cache_keys.remove(k)
def request(
self, url, method, error_message=None,
raise_exc=False, connect_retries=1,
global_request_id=None, *args, **kwargs):
global_request_id=None,
*args, **kwargs):
conn = self._get_connection()
if not global_request_id:
conn = self._get_connection()
if conn:
# Per-request setting should take precedence
global_request_id = conn._global_request_id
# Per-request setting should take precedence
global_request_id = conn._global_request_id
key = None
key_prefix = self._get_cache_key_prefix(url)
# The caller might want to force cache bypass.
skip_cache = kwargs.pop('skip_cache', False)
if conn.cache_enabled:
# Construct cache key. It consists of:
# service.name_parts.URL.str(kwargs)
key = '.'.join(
[key_prefix, url, str(kwargs)]
)
# Track cache key for invalidating possibility
conn._api_cache_keys.add(key)
try:
response = super(Proxy, self).request(
url, method,
connect_retries=connect_retries, raise_exc=raise_exc,
global_request_id=global_request_id,
**kwargs)
if conn.cache_enabled and not skip_cache and method == 'GET':
# Get the object expiration time from config
# default to 0 to disable caching for this resource type
expiration_time = int(
conn._cache_expirations.get(key_prefix, 0))
# Get from cache or execute and cache
response = conn._cache.get_or_create(
key=key,
creator=super(Proxy, self).request,
creator_args=(
[url, method],
dict(
connect_retries=connect_retries,
raise_exc=raise_exc,
global_request_id=global_request_id,
**kwargs
)
),
expiration_time=expiration_time
)
else:
# invalidate cache if we send modification request or user
# asked for cache bypass
self._invalidate_cache(conn, key_prefix)
# Pass through the API request bypassing cache
response = super(Proxy, self).request(
url, method,
connect_retries=connect_retries, raise_exc=raise_exc,
global_request_id=global_request_id,
**kwargs)
for h in response.history:
self._report_stats(h)
self._report_stats(response)
@ -111,6 +170,7 @@ class Proxy(adapter.Adapter):
self._report_stats(None, url, method, e)
raise
@functools.lru_cache(maxsize=256)
def _extract_name(self, url, service_type=None, project_id=None):
'''Produce a key name to use in logging/metrics from the URL path.
@ -484,7 +544,7 @@ class Proxy(adapter.Adapter):
@_check_resource(strict=False)
def _get(self, resource_type, value=None, requires_id=True,
base_path=None, **attrs):
base_path=None, skip_cache=False, **attrs):
"""Fetch a resource
:param resource_type: The type of resource to get.
@ -495,6 +555,8 @@ class Proxy(adapter.Adapter):
:param str base_path: Base part of the URI for fetching resources, if
different from
:data:`~openstack.resource.Resource.base_path`.
:param bool skip_cache: A boolean indicating whether optional API
cache should be skipped for this invocation.
:param dict attrs: Attributes to be passed onto the
:meth:`~openstack.resource.Resource.get`
method. These should correspond
@ -509,6 +571,7 @@ class Proxy(adapter.Adapter):
return res.fetch(
self, requires_id=requires_id, base_path=base_path,
skip_cache=skip_cache,
error_message="No {resource_type} found for {value}".format(
resource_type=resource_type.__name__, value=value))

View File

@ -1480,6 +1480,7 @@ class Resource(dict):
requires_id=True,
base_path=None,
error_message=None,
skip_cache=False,
**params,
):
"""Get a remote resource based on this instance.
@ -1492,6 +1493,8 @@ class Resource(dict):
different from :data:`~openstack.resource.Resource.base_path`.
:param str error_message: An Error message to be returned if
requested object does not exist.
:param bool skip_cache: A boolean indicating whether optional API
cache should be skipped for this invocation.
:param dict params: Additional parameters that can be consumed.
:return: This :class:`Resource` instance.
:raises: :exc:`~openstack.exceptions.MethodNotSupported` if
@ -1507,7 +1510,8 @@ class Resource(dict):
session = self._get_session(session)
microversion = self._get_microversion_for(session, 'fetch')
response = session.get(request.url, microversion=microversion,
params=params)
params=params,
skip_cache=skip_cache)
kwargs = {}
if error_message:
kwargs['error_message'] = error_message
@ -2078,7 +2082,7 @@ def wait_for_status(
timeout=wait,
message=msg,
wait=interval):
resource = resource.fetch(session)
resource = resource.fetch(session, skip_cache=True)
if not resource:
raise exceptions.ResourceFailure(
@ -2120,7 +2124,7 @@ def wait_for_delete(session, resource, interval, wait):
id=resource.id),
wait=interval):
try:
resource = resource.fetch(session)
resource = resource.fetch(session, skip_cache=True)
if not resource:
return orig_resource
if resource.status.lower() == 'deleted':

View File

@ -535,12 +535,13 @@ class TestMemoryCache(base.TestCase):
down_port = test_port.TestPort.mock_neutron_port_create_rep['port']
active_port = down_port.copy()
active_port['status'] = 'ACTIVE'
# We're testing to make sure a query string isn't passed when we're
# caching, but that the results are still filtered.
# We're testing to make sure a query string is passed when we're
# caching (cache by url), and that the results are still filtered.
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'network', 'public', append=['v2.0', 'ports']),
'network', 'public', append=['v2.0', 'ports'],
qs_elements=['status=DOWN']),
json={'ports': [
down_port,
active_port,

View File

@ -89,7 +89,8 @@ class TestQuotaSet(base.TestCase):
self.sess.get.assert_called_with(
'/os-quota-sets/proj',
microversion=1,
params={})
params={},
skip_cache=False)
self.assertEqual(BASIC_EXAMPLE['backups'], sot.backups)
self.assertEqual({}, sot.reservation)
@ -110,7 +111,8 @@ class TestQuotaSet(base.TestCase):
self.sess.get.assert_called_with(
'/os-quota-sets/proj',
microversion=1,
params={'usage': True})
params={'usage': True},
skip_cache=False)
self.assertEqual(
USAGE_EXAMPLE['backups']['limit'],

View File

@ -407,7 +407,8 @@ class TestImage(base.TestCase):
self.sess.get.assert_has_calls(
[mock.call('images/IDENTIFIER/file',
stream=False),
mock.call('images/IDENTIFIER', microversion=None, params={})])
mock.call('images/IDENTIFIER', microversion=None, params={},
skip_cache=False)])
self.assertEqual(rv, resp1)
@ -436,7 +437,8 @@ class TestImage(base.TestCase):
self.sess.get.assert_has_calls(
[mock.call('images/IDENTIFIER/file',
stream=False),
mock.call('images/IDENTIFIER', microversion=None, params={})])
mock.call('images/IDENTIFIER', microversion=None, params={},
skip_cache=False)])
self.assertEqual(rv, resp1)
@ -536,7 +538,7 @@ class TestImage(base.TestCase):
self.sess.get.assert_has_calls([
mock.call('images/' + EXAMPLE['name'], microversion=None,
params={}),
params={}, skip_cache=False),
mock.call('/images', headers={'Accept': 'application/json'},
microversion=None, params={'name': EXAMPLE['name']}),
mock.call('/images', headers={'Accept': 'application/json'},

View File

@ -113,8 +113,10 @@ class TestSecret(base.TestCase):
sess.get.assert_has_calls(
[mock.call("secrets/id",),
mock.call("secrets/id/payload",
headers={"Accept": content_type})])
mock.call(
"secrets/id/payload",
headers={"Accept": content_type},
skip_cache=False)])
self.assertEqual(rv.payload, payload)
self.assertEqual(rv.status, metadata["status"])

View File

@ -141,8 +141,8 @@ class TestClaim(base.TestCase):
"queue": FAKE1["queue_name"], "claim": FAKE1["id"]}
headers = {"Client-ID": "NEW_CLIENT_ID",
"X-PROJECT-ID": "NEW_PROJECT_ID"}
sess.get.assert_called_with(url,
headers=headers)
sess.get.assert_called_with(
url, headers=headers, skip_cache=False)
sess.get_project_id.assert_called_once_with()
sot._translate_response.assert_called_once_with(resp)
self.assertEqual(sot, res)
@ -160,8 +160,8 @@ class TestClaim(base.TestCase):
"queue": FAKE2["queue_name"], "claim": FAKE2["id"]}
headers = {"Client-ID": "OLD_CLIENT_ID",
"X-PROJECT-ID": "OLD_PROJECT_ID"}
sess.get.assert_called_with(url,
headers=headers)
sess.get.assert_called_with(
url, headers=headers, skip_cache=False)
sot._translate_response.assert_called_once_with(resp)
self.assertEqual(sot, res)

View File

@ -151,8 +151,8 @@ class TestMessage(base.TestCase):
'queue': FAKE1['queue_name'], 'message': FAKE1['id']}
headers = {'Client-ID': 'NEW_CLIENT_ID',
'X-PROJECT-ID': 'NEW_PROJECT_ID'}
sess.get.assert_called_with(url,
headers=headers)
sess.get.assert_called_with(
url, headers=headers, skip_cache=False)
sess.get_project_id.assert_called_once_with()
sot._translate_response.assert_called_once_with(resp)
self.assertEqual(sot, res)
@ -173,8 +173,8 @@ class TestMessage(base.TestCase):
res = sot.fetch(sess)
headers = {'Client-ID': 'OLD_CLIENT_ID',
'X-PROJECT-ID': 'OLD_PROJECT_ID'}
sess.get.assert_called_with(url,
headers=headers)
sess.get.assert_called_with(
url, headers=headers, skip_cache=False)
sot._translate_response.assert_called_once_with(resp)
self.assertEqual(sot, res)

View File

@ -109,8 +109,8 @@ class TestQueue(base.TestCase):
url = 'queues/%s' % FAKE1['name']
headers = {'Client-ID': 'NEW_CLIENT_ID',
'X-PROJECT-ID': 'NEW_PROJECT_ID'}
sess.get.assert_called_with(url,
headers=headers)
sess.get.assert_called_with(
url, headers=headers, skip_cache=False)
sess.get_project_id.assert_called_once_with()
sot._translate_response.assert_called_once_with(resp)
self.assertEqual(sot, res)
@ -127,8 +127,8 @@ class TestQueue(base.TestCase):
url = 'queues/%s' % FAKE2['name']
headers = {'Client-ID': 'OLD_CLIENT_ID',
'X-PROJECT-ID': 'OLD_PROJECT_ID'}
sess.get.assert_called_with(url,
headers=headers)
sess.get.assert_called_with(
url, headers=headers, skip_cache=False)
sot._translate_response.assert_called_once_with(resp)
self.assertEqual(sot, res)

View File

@ -127,8 +127,8 @@ class TestSubscription(base.TestCase):
"queue": FAKE1["queue_name"], "subscription": FAKE1["id"]}
headers = {"Client-ID": "NEW_CLIENT_ID",
"X-PROJECT-ID": "NEW_PROJECT_ID"}
sess.get.assert_called_with(url,
headers=headers)
sess.get.assert_called_with(
url, headers=headers, skip_cache=False)
sess.get_project_id.assert_called_once_with()
sot._translate_response.assert_called_once_with(resp)
self.assertEqual(sot, res)
@ -146,8 +146,8 @@ class TestSubscription(base.TestCase):
"queue": FAKE2["queue_name"], "subscription": FAKE2["id"]}
headers = {"Client-ID": "OLD_CLIENT_ID",
"X-PROJECT-ID": "OLD_PROJECT_ID"}
sess.get.assert_called_with(url,
headers=headers)
sess.get.assert_called_with(
url, headers=headers, skip_cache=False)
sot._translate_response.assert_called_once_with(resp)
self.assertEqual(sot, res)

View File

@ -232,11 +232,13 @@ class TestStack(base.TestCase):
self.assertEqual(sot, sot.fetch(sess))
sess.get.assert_called_with(
'stacks/{id}'.format(id=sot.id),
microversion=None)
microversion=None,
skip_cache=False)
sot.fetch(sess, resolve_outputs=False)
sess.get.assert_called_with(
'stacks/{id}?resolve_outputs=False'.format(id=sot.id),
microversion=None)
microversion=None,
skip_cache=False)
ex = self.assertRaises(exceptions.ResourceNotFound, sot.fetch, sess)
self.assertEqual('oops', str(ex))
ex = self.assertRaises(exceptions.ResourceNotFound, sot.fetch, sess)

View File

@ -10,6 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import copy
import queue
from unittest import mock
@ -35,7 +36,7 @@ class CreateableResource(resource.Resource):
class RetrieveableResource(resource.Resource):
allow_retrieve = True
allow_fetch = True
class ListableResource(resource.Resource):
@ -380,6 +381,7 @@ class TestProxyGet(base.TestCase):
self.res.fetch.assert_called_with(
self.sot, requires_id=True,
base_path=None,
skip_cache=mock.ANY,
error_message=mock.ANY)
self.assertEqual(rv, self.fake_result)
@ -390,6 +392,7 @@ class TestProxyGet(base.TestCase):
self.res._update.assert_called_once_with(**args)
self.res.fetch.assert_called_with(
self.sot, requires_id=True, base_path=None,
skip_cache=mock.ANY,
error_message=mock.ANY)
self.assertEqual(rv, self.fake_result)
@ -400,6 +403,7 @@ class TestProxyGet(base.TestCase):
connection=self.cloud, id=self.fake_id)
self.res.fetch.assert_called_with(
self.sot, requires_id=True, base_path=None,
skip_cache=mock.ANY,
error_message=mock.ANY)
self.assertEqual(rv, self.fake_result)
@ -412,6 +416,7 @@ class TestProxyGet(base.TestCase):
connection=self.cloud, id=self.fake_id)
self.res.fetch.assert_called_with(
self.sot, requires_id=True, base_path=base_path,
skip_cache=mock.ANY,
error_message=mock.ANY)
self.assertEqual(rv, self.fake_result)
@ -521,6 +526,116 @@ class TestExtractName(base.TestCase):
self.assertEqual(self.parts, results)
class TestProxyCache(base.TestCase):
class Res(resource.Resource):
base_path = 'fake'
allow_commit = True
allow_fetch = True
foo = resource.Body('foo')
def setUp(self):
super(TestProxyCache, self).setUp(
cloud_config_fixture='clouds_cache.yaml')
self.session = mock.Mock()
self.session._sdk_connection = self.cloud
self.session.get_project_id = mock.Mock(return_value='fake_prj')
self.response = mock.Mock()
self.response.status_code = 200
self.response.history = []
self.response.headers = {}
self.response.body = {}
self.response.json = mock.Mock(
return_value=self.response.body)
self.session.request = mock.Mock(
return_value=self.response)
self.sot = proxy.Proxy(self.session)
self.sot._connection = self.cloud
self.sot.service_type = 'srv'
def _get_key(self, id):
return (
f"srv.fake.fake/{id}."
"{'microversion': None, 'params': {}}")
def test_get_not_in_cache(self):
self.cloud._cache_expirations['srv.fake'] = 5
self.sot._get(self.Res, '1')
self.session.request.assert_called_with(
'fake/1',
'GET',
connect_retries=mock.ANY, raise_exc=mock.ANY,
global_request_id=mock.ANY,
endpoint_filter=mock.ANY,
headers=mock.ANY,
microversion=mock.ANY, params=mock.ANY
)
self.assertIn(
self._get_key(1),
self.cloud._api_cache_keys)
def test_get_from_cache(self):
key = self._get_key(2)
self.cloud._cache.set(key, self.response)
# set expiration for the resource to respect cache
self.cloud._cache_expirations['srv.fake'] = 5
self.sot._get(self.Res, '2')
self.session.request.assert_not_called()
def test_modify(self):
key = self._get_key(3)
self.cloud._cache.set(key, self.response)
self.cloud._api_cache_keys.add(key)
self.cloud._cache_expirations['srv.fake'] = 5
# Ensure first call gets value from cache
self.sot._get(self.Res, '3')
self.session.request.assert_not_called()
# update call invalidates the cache and triggers API
rs = self.Res.existing(id='3')
self.sot._update(self.Res, rs, foo='bar')
self.session.request.assert_called()
self.assertIsNotNone(self.cloud._cache.get(key))
self.assertEqual(
'NoValue',
type(self.cloud._cache.get(key)).__name__)
self.assertNotIn(key, self.cloud._api_cache_keys)
# next get call again triggers API
self.sot._get(self.Res, '3')
self.session.request.assert_called()
def test_get_bypass_cache(self):
key = self._get_key(4)
resp = copy.deepcopy(self.response)
resp.body = {'foo': 'bar'}
self.cloud._api_cache_keys.add(key)
self.cloud._cache.set(key, resp)
# set expiration for the resource to respect cache
self.cloud._cache_expirations['srv.fake'] = 5
self.sot._get(self.Res, '4', skip_cache=True)
self.session.request.assert_called()
# validate we got empty body as expected, and not what is in cache
self.assertEqual(dict(), self.response.body)
self.assertNotIn(key, self.cloud._api_cache_keys)
self.assertEqual(
'NoValue',
type(self.cloud._cache.get(key)).__name__)
class TestProxyCleanup(base.TestCase):
def setUp(self):

View File

@ -158,7 +158,8 @@ class TestProxyBase(base.TestCase):
res.fetch.assert_called_once_with(
proxy, requires_id=True,
base_path=None,
error_message=mock.ANY)
error_message=mock.ANY,
skip_cache=False)
def verify_head(
self, test_method, resource_type, base_path=None, *,

View File

@ -1620,7 +1620,8 @@ class TestResourceActions(base.TestCase):
self.sot._prepare_request.assert_called_once_with(
requires_id=True, base_path=None)
self.session.get.assert_called_once_with(
self.request.url, microversion=None, params={})
self.request.url, microversion=None, params={},
skip_cache=False)
self.assertIsNone(self.sot.microversion)
self.sot._translate_response.assert_called_once_with(self.response)
@ -1632,7 +1633,8 @@ class TestResourceActions(base.TestCase):
self.sot._prepare_request.assert_called_once_with(
requires_id=True, base_path=None)
self.session.get.assert_called_once_with(
self.request.url, microversion=None, params={'fields': 'a,b'})
self.request.url, microversion=None, params={'fields': 'a,b'},
skip_cache=False)
self.assertIsNone(self.sot.microversion)
self.sot._translate_response.assert_called_once_with(self.response)
@ -1654,7 +1656,8 @@ class TestResourceActions(base.TestCase):
sot._prepare_request.assert_called_once_with(
requires_id=True, base_path=None)
self.session.get.assert_called_once_with(
self.request.url, microversion='1.42', params={})
self.request.url, microversion='1.42', params={},
skip_cache=False)
self.assertEqual(sot.microversion, '1.42')
sot._translate_response.assert_called_once_with(self.response)
@ -1666,7 +1669,8 @@ class TestResourceActions(base.TestCase):
self.sot._prepare_request.assert_called_once_with(
requires_id=False, base_path=None)
self.session.get.assert_called_once_with(
self.request.url, microversion=None, params={})
self.request.url, microversion=None, params={},
skip_cache=False)
self.sot._translate_response.assert_called_once_with(self.response)
self.assertEqual(result, self.sot)
@ -1678,7 +1682,8 @@ class TestResourceActions(base.TestCase):
requires_id=False,
base_path='dummy')
self.session.get.assert_called_once_with(
self.request.url, microversion=None, params={})
self.request.url, microversion=None, params={},
skip_cache=False)
self.sot._translate_response.assert_called_once_with(self.response)
self.assertEqual(result, self.sot)

View File

@ -0,0 +1,4 @@
---
features:
- |
Add possibility to cache GET requests using dogpile cache.