Cache attributes with custom handling
Previously, all caching of attribute values was done via the Attributes object. However, some resource types override Resource.get_attribute() to do custom handling of the trailing attribute path or dynamic attribute names, and in these cases the resulting values were not cached (since they don't go through the Attributes object).

This patch adds a caching step for these resources:

* OS::Senlin::Cluster
* OS::Heat::ResourceChain
* OS::Heat::ResourceGroup
* OS::Heat::AutoscalingGroup
* OS::Heat::SoftwareDeployment
* OS::Heat::SoftwareDeploymentGroup
* TemplateResource
* AWS::CloudFormation::Stack

Change-Id: I07ac22cc4370a79bd8712e2431fa3272115bc0eb
Co-Authored-By: Crag Wolfe <cwolfe@redhat.com>
Partial-Bug: #1660831
This commit is contained in:
parent
bc97d4d8e0
commit
45e4c53f78
|
@ -146,7 +146,7 @@ class Attributes(collections.Mapping):
|
||||||
def __init__(self, res_name, schema, resolver):
|
def __init__(self, res_name, schema, resolver):
|
||||||
self._resource_name = res_name
|
self._resource_name = res_name
|
||||||
self._resolver = resolver
|
self._resolver = resolver
|
||||||
self._attributes = Attributes._make_attributes(schema)
|
self.set_schema(schema)
|
||||||
self.reset_resolved_values()
|
self.reset_resolved_values()
|
||||||
|
|
||||||
assert ALL_ATTRIBUTES not in schema, \
|
assert ALL_ATTRIBUTES not in schema, \
|
||||||
|
@ -159,6 +159,20 @@ class Attributes(collections.Mapping):
|
||||||
self._has_new_resolved = False
|
self._has_new_resolved = False
|
||||||
self._resolved_values = {}
|
self._resolved_values = {}
|
||||||
|
|
||||||
|
def set_schema(self, schema):
|
||||||
|
self._attributes = self._make_attributes(schema)
|
||||||
|
|
||||||
|
def get_cache_mode(self, attribute_name):
|
||||||
|
"""Return the cache mode for the specified attribute.
|
||||||
|
|
||||||
|
If the attribute is not defined in the schema, the default cache
|
||||||
|
mode (CACHE_LOCAL) is returned.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self._attributes[attribute_name].schema.cache_mode
|
||||||
|
except KeyError:
|
||||||
|
return Schema.CACHE_LOCAL
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _make_attributes(schema):
|
def _make_attributes(schema):
|
||||||
return dict((n, Attribute(n, d)) for n, d in schema.items())
|
return dict((n, Attribute(n, d)) for n, d in schema.items())
|
||||||
|
@ -229,10 +243,7 @@ class Attributes(collections.Mapping):
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def cached_attrs(self):
|
def cached_attrs(self):
|
||||||
# do not return an empty dict
|
return self._resolved_values
|
||||||
if self._resolved_values:
|
|
||||||
return self._resolved_values
|
|
||||||
return None
|
|
||||||
|
|
||||||
@cached_attrs.setter
|
@cached_attrs.setter
|
||||||
def cached_attrs(self, c_attrs):
|
def cached_attrs(self, c_attrs):
|
||||||
|
@ -242,6 +253,10 @@ class Attributes(collections.Mapping):
|
||||||
self._resolved_values = c_attrs
|
self._resolved_values = c_attrs
|
||||||
self._has_new_resolved = False
|
self._has_new_resolved = False
|
||||||
|
|
||||||
|
def set_cached_attr(self, key, value):
|
||||||
|
self._resolved_values[key] = value
|
||||||
|
self._has_new_resolved = True
|
||||||
|
|
||||||
def has_new_cached_attrs(self):
|
def has_new_cached_attrs(self):
|
||||||
"""Returns True if cached_attrs have changed
|
"""Returns True if cached_attrs have changed
|
||||||
|
|
||||||
|
@ -269,8 +284,7 @@ class Attributes(collections.Mapping):
|
||||||
self._validate_type(attrib, value)
|
self._validate_type(attrib, value)
|
||||||
# only store if not None, it may resolve to an actual value
|
# only store if not None, it may resolve to an actual value
|
||||||
# on subsequent calls
|
# on subsequent calls
|
||||||
self._has_new_resolved = True
|
self.set_cached_attr(key, value)
|
||||||
self._resolved_values[key] = value
|
|
||||||
return value
|
return value
|
||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
|
|
|
@ -42,6 +42,7 @@ from heat.engine import rsrc_defn
|
||||||
from heat.engine import scheduler
|
from heat.engine import scheduler
|
||||||
from heat.engine import status
|
from heat.engine import status
|
||||||
from heat.engine import support
|
from heat.engine import support
|
||||||
|
from heat.engine import sync_point
|
||||||
from heat.objects import resource as resource_objects
|
from heat.objects import resource as resource_objects
|
||||||
from heat.objects import resource_data as resource_data_objects
|
from heat.objects import resource_data as resource_data_objects
|
||||||
from heat.objects import resource_properties_data as rpd_objects
|
from heat.objects import resource_properties_data as rpd_objects
|
||||||
|
@ -281,7 +282,7 @@ class Resource(status.ResourceStatus):
|
||||||
self, resource.data)
|
self, resource.data)
|
||||||
except exception.NotFound:
|
except exception.NotFound:
|
||||||
self._data = {}
|
self._data = {}
|
||||||
self.attributes.cached_attrs = resource.attr_data
|
self.attributes.cached_attrs = resource.attr_data or None
|
||||||
self._attr_data_id = resource.attr_data_id
|
self._attr_data_id = resource.attr_data_id
|
||||||
self._rsrc_metadata = resource.rsrc_metadata
|
self._rsrc_metadata = resource.rsrc_metadata
|
||||||
self._stored_properties_data = resource.properties_data
|
self._stored_properties_data = resource.properties_data
|
||||||
|
@ -922,7 +923,7 @@ class Resource(status.ResourceStatus):
|
||||||
for attr in attrs:
|
for attr in attrs:
|
||||||
path = (attr,) if isinstance(attr, six.string_types) else attr
|
path = (attr,) if isinstance(attr, six.string_types) else attr
|
||||||
try:
|
try:
|
||||||
yield attr, self.get_attribute(*path)
|
yield attr, self._get_attribute_caching(*path)
|
||||||
except exception.InvalidTemplateAttribute as ita:
|
except exception.InvalidTemplateAttribute as ita:
|
||||||
LOG.info('%s', ita)
|
LOG.info('%s', ita)
|
||||||
|
|
||||||
|
@ -2160,6 +2161,24 @@ class Resource(status.ResourceStatus):
|
||||||
|
|
||||||
return attributes.select_from_attribute(attribute, path)
|
return attributes.select_from_attribute(attribute, path)
|
||||||
|
|
||||||
|
def _get_attribute_caching(self, key, *path):
|
||||||
|
cache_custom = ((self.attributes.get_cache_mode(key) !=
|
||||||
|
attributes.Schema.CACHE_NONE) and
|
||||||
|
(type(self).get_attribute != Resource.get_attribute))
|
||||||
|
if cache_custom:
|
||||||
|
if path:
|
||||||
|
full_key = sync_point.str_pack_tuple((key,) + path)
|
||||||
|
else:
|
||||||
|
full_key = key
|
||||||
|
if full_key in self.attributes.cached_attrs:
|
||||||
|
return self.attributes.cached_attrs[full_key]
|
||||||
|
|
||||||
|
attr_val = self.get_attribute(key, *path)
|
||||||
|
|
||||||
|
if cache_custom:
|
||||||
|
self.attributes.set_cached_attr(full_key, attr_val)
|
||||||
|
return attr_val
|
||||||
|
|
||||||
def FnGetAtt(self, key, *path):
|
def FnGetAtt(self, key, *path):
|
||||||
"""For the intrinsic function Fn::GetAtt.
|
"""For the intrinsic function Fn::GetAtt.
|
||||||
|
|
||||||
|
@ -2175,7 +2194,7 @@ class Resource(status.ResourceStatus):
|
||||||
attribute = self.stack.cache_data_resource_attribute(
|
attribute = self.stack.cache_data_resource_attribute(
|
||||||
self.name, complex_key)
|
self.name, complex_key)
|
||||||
return attribute
|
return attribute
|
||||||
return self.get_attribute(key, *path)
|
return self._get_attribute_caching(key, *path)
|
||||||
|
|
||||||
def FnGetAtts(self):
|
def FnGetAtts(self):
|
||||||
"""For the intrinsic function get_attr which returns all attributes.
|
"""For the intrinsic function get_attr which returns all attributes.
|
||||||
|
|
|
@ -11,8 +11,6 @@
|
||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
import weakref
|
|
||||||
|
|
||||||
from oslo_serialization import jsonutils
|
from oslo_serialization import jsonutils
|
||||||
from requests import exceptions
|
from requests import exceptions
|
||||||
import six
|
import six
|
||||||
|
@ -120,9 +118,7 @@ class TemplateResource(stack_resource.StackResource):
|
||||||
tmpl, self.stack.env.param_defaults)
|
tmpl, self.stack.env.param_defaults)
|
||||||
|
|
||||||
self.attributes_schema.update(self.base_attributes_schema)
|
self.attributes_schema.update(self.base_attributes_schema)
|
||||||
self.attributes = attributes.Attributes(
|
self.attributes.set_schema(self.attributes_schema)
|
||||||
self.name, self.attributes_schema,
|
|
||||||
self._make_resolver(weakref.ref(self)))
|
|
||||||
|
|
||||||
def child_params(self):
|
def child_params(self):
|
||||||
"""Override method of child_params for the resource.
|
"""Override method of child_params for the resource.
|
||||||
|
|
|
@ -73,7 +73,7 @@ def update_input_data(context, entity_id, current_traversal,
|
||||||
return rows_updated
|
return rows_updated
|
||||||
|
|
||||||
|
|
||||||
def _str_pack_tuple(t):
|
def str_pack_tuple(t):
|
||||||
return u'tuple:' + str(t)
|
return u'tuple:' + str(t)
|
||||||
|
|
||||||
|
|
||||||
|
@ -97,7 +97,7 @@ def _serialize(d):
|
||||||
d2 = {}
|
d2 = {}
|
||||||
for k, v in d.items():
|
for k, v in d.items():
|
||||||
if isinstance(k, tuple):
|
if isinstance(k, tuple):
|
||||||
k = _str_pack_tuple(k)
|
k = str_pack_tuple(k)
|
||||||
if isinstance(v, dict):
|
if isinstance(v, dict):
|
||||||
v = _serialize(v)
|
v = _serialize(v)
|
||||||
d2[k] = v
|
d2[k] = v
|
||||||
|
|
Loading…
Reference in New Issue