diff --git a/heat/engine/check_resource.py b/heat/engine/check_resource.py
index afcd10d9fe..58c614d876 100644
--- a/heat/engine/check_resource.py
+++ b/heat/engine/check_resource.py
@@ -23,6 +23,7 @@ from oslo_log import log as logging
 from heat.common import exception
 from heat.common.i18n import _LE
 from heat.common.i18n import _LI
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine import scheduler
 from heat.engine import stack as parser
@@ -49,11 +50,13 @@ class CheckResource(object):
                  engine_id,
                  rpc_client,
                  thread_group_mgr,
-                 msg_queue):
+                 msg_queue,
+                 input_data):
         self.engine_id = engine_id
         self._rpc_client = rpc_client
         self.thread_group_mgr = thread_group_mgr
         self.msg_queue = msg_queue
+        self.input_data = input_data
 
     def _try_steal_engine_lock(self, cnxt, resource_id):
         rs_obj = resource_objects.Resource.get_obj(cnxt,
@@ -113,7 +116,7 @@ class CheckResource(object):
             new_res_id = rsrc.make_replacement(tmpl.id)
             LOG.info(_LI("Replacing resource with new id %s"),
                      new_res_id)
-            rpc_data = sync_point.serialize_input_data(resource_data)
+            rpc_data = sync_point.serialize_input_data(self.input_data)
             self._rpc_client.check_resource(cnxt,
                                             new_res_id,
                                             current_traversal,
@@ -129,7 +132,7 @@ class CheckResource(object):
                 return True
         except exception.UpdateInProgress:
             if self._try_steal_engine_lock(cnxt, rsrc.id):
-                rpc_data = sync_point.serialize_input_data(resource_data)
+                rpc_data = sync_point.serialize_input_data(self.input_data)
                 # set the resource state as failed
                 status_reason = ('Worker went down '
                                  'during resource %s' % rsrc.action)
@@ -286,17 +289,9 @@ class CheckResource(object):
 
 
 def load_resource(cnxt, resource_id, resource_data, is_update):
-    if is_update:
-        cache_data = {in_data.get(
-            'name'): in_data for in_data in resource_data.values()
-            if in_data is not None}
-    else:
-        # no data to resolve in cleanup phase
-        cache_data = {}
-
     try:
         return resource.Resource.load(cnxt, resource_id,
-                                      is_update, cache_data)
+                                      is_update, resource_data)
     except (exception.ResourceNotFound, exception.NotFound):
         # can be ignored
         return None, None, None
@@ -319,14 +314,11 @@ def construct_input_data(rsrc, curr_stack):
     dep_attrs = curr_stack.get_dep_attrs(
         six.itervalues(curr_stack.resources),
         rsrc.name)
-    input_data = {'id': rsrc.id,
-                  'name': rsrc.name,
-                  'reference_id': rsrc.get_reference_id(),
-                  'attrs': _resolve_attributes(dep_attrs, rsrc),
-                  'status': rsrc.status,
-                  'action': rsrc.action,
-                  'uuid': rsrc.uuid}
-    return input_data
+    input_data = node_data.NodeData(rsrc.id, rsrc.name, rsrc.uuid,
+                                    rsrc.get_reference_id(),
+                                    _resolve_attributes(dep_attrs, rsrc),
+                                    rsrc.action, rsrc.status)
+    return input_data.as_dict()
 
 
 def check_stack_complete(cnxt, stack, current_traversal, sender_id, deps,
diff --git a/heat/engine/node_data.py b/heat/engine/node_data.py
new file mode 100644
index 0000000000..91a1143ca6
--- /dev/null
+++ b/heat/engine/node_data.py
@@ -0,0 +1,115 @@
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import six
+
+
+class NodeData(object):
+    """Data about a node in the graph, to be passed along to other nodes."""
+
+    __slots__ = ('primary_key', 'name', 'uuid',
+                 '_reference_id', '_attributes',
+                 'action', 'status')
+
+    def __init__(self, primary_key, resource_name, uuid,
+                 reference_id, attributes, action, status):
+        """Initialise with data about the resource processed by the node.
+
+        :param primary_key: the ID of the resource in the database
+        :param resource_name: the logical resource name
+        :param uuid: the UUID of the resource
+        :param reference_id: the value to be returned by get_resource
+        :param attributes: dict of attribute values to be returned by get_attr
+        :param action: the last resource action
+        :param status: the status of the last action
+        """
+        self.primary_key = primary_key
+        self.name = resource_name
+        self.uuid = uuid
+        self._reference_id = reference_id
+        self._attributes = attributes
+        self.action = action
+        self.status = status
+
+    def reference_id(self):
+        """Return the reference ID of the resource.
+
+        That is, the result that the {get_resource: } intrinsic function
+        should return for this resource.
+        """
+        return self._reference_id
+
+    def attributes(self):
+        """Return a dict of all available top-level attribute values."""
+        return {k: v
+                for k, v in self._attributes.items()
+                if isinstance(k, six.string_types)}
+
+    def attribute(self, attr_name):
+        """Return the specified attribute value."""
+        return self._attributes[attr_name]
+
+    def attribute_names(self):
+        """Iterate over valid top-level attribute names."""
+        for key in self._attributes:
+            if isinstance(key, six.string_types):
+                yield key
+            else:
+                yield key[0]
+
+    def as_dict(self):
+        """Return a dict representation of the data.
+
+        This is the format that is serialised and stored in the database's
+        SyncPoints.
+        """
+        return {
+            'id': self.primary_key,
+            'name': self.name,
+            'reference_id': self.reference_id(),
+            'attrs': dict(self._attributes),
+            'status': self.status,
+            'action': self.action,
+            'uuid': self.uuid,
+        }
+
+    @classmethod
+    def from_dict(cls, node_data):
+        """Create a new NodeData object from deserialised data.
+
+        This reads the format that is stored in the database, and is the
+        inverse of as_dict().
+        """
+        if isinstance(node_data, cls):
+            return node_data
+
+        return cls(node_data.get('id'),
+                   node_data.get('name'),
+                   node_data.get('uuid'),
+                   node_data.get('reference_id'),
+                   node_data.get('attrs', {}),
+                   node_data.get('action'),
+                   node_data.get('status'))
+
+
+def load_resources_data(data):
+    """Return the data for all of the resources that meet at a SyncPoint.
+
+    The input is the input_data dict from a SyncPoint received over RPC. The
+    keys (which are ignored) are resource primary keys.
+
+    The output is a dict of NodeData objects with the resource names as the
+    keys.
+    """
+    nodes = (NodeData.from_dict(nd) for nd in data.values() if nd is not None)
+    return {node.name: node for node in nodes}
diff --git a/heat/engine/resource.py b/heat/engine/resource.py
index d6bc65731b..5794d6c5fa 100644
--- a/heat/engine/resource.py
+++ b/heat/engine/resource.py
@@ -271,10 +271,11 @@ class Resource(object):
         if resource:
             self._load_data(resource)
         elif stack.has_cache_data(name):
-            self.action = stack.cache_data[name]['action']
-            self.status = stack.cache_data[name]['status']
-            self.id = stack.cache_data[name]['id']
-            self.uuid = stack.cache_data[name]['uuid']
+            cached_data = stack.cache_data[name]
+            self.action = cached_data.action
+            self.status = cached_data.status
+            self.id = cached_data.primary_key
+            self.uuid = cached_data.uuid
 
     def rpc_client(self):
         """Return a client for making engine RPC calls."""
@@ -883,8 +884,8 @@ class Resource(object):
         """Creates the resource by invoking the scheduler TaskRunner."""
         with self.lock(engine_id):
             self.requires = list(
-                set(data[u'id'] for data in resource_data.values()
-                    if data)
+                set(data.primary_key for data in resource_data.values()
+                    if data is not None)
             )
             self.current_template_id = template_id
             if self.stack.adopt_stack_data is None:
@@ -1144,7 +1145,7 @@ class Resource(object):
         def update_tmpl_id_and_requires():
             self.current_template_id = template_id
             self.requires = list(
-                set(data[u'id'] for data in resource_data.values()
+                set(data.primary_key for data in resource_data.values()
                     if data is not None)
             )
 
diff --git a/heat/engine/stack.py b/heat/engine/stack.py
index 7cdecbfc1b..29416def1f 100644
--- a/heat/engine/stack.py
+++ b/heat/engine/stack.py
@@ -2008,18 +2008,16 @@ class Stack(collections.Mapping):
 
     def has_cache_data(self, resource_name):
         return (self.cache_data is not None and
-                self.cache_data.get(resource_name) is not None)
+                resource_name in self.cache_data)
 
     def cache_data_reference_id(self, resource_name):
-        return self.cache_data.get(
-            resource_name, {}).get('reference_id')
+        return self.cache_data[resource_name].reference_id()
 
     def cache_data_resource_attribute(self, resource_name, attribute_key):
-        return self.cache_data.get(
-            resource_name, {}).get('attrs', {}).get(attribute_key)
+        return self.cache_data[resource_name].attribute(attribute_key)
 
     def cache_data_resource_all_attributes(self, resource_name):
-        return self.cache_data.get(resource_name, {}).get('attrs', {})
+        return self.cache_data[resource_name].attributes()
 
     def mark_complete(self):
         """Mark the update as complete.
diff --git a/heat/engine/worker.py b/heat/engine/worker.py
index a72fa73e69..f6539cb03d 100644
--- a/heat/engine/worker.py
+++ b/heat/engine/worker.py
@@ -27,6 +27,7 @@ from heat.common.i18n import _LW
 from heat.common import messaging as rpc_messaging
 from heat.db.sqlalchemy import api as db_api
 from heat.engine import check_resource
+from heat.engine import node_data
 from heat.engine import stack as parser
 from heat.engine import sync_point
 from heat.objects import stack as stack_objects
@@ -157,7 +158,9 @@ class WorkerService(object):
         The node may be associated with either an update or a cleanup of
         its associated resource.
         """
-        resource_data = dict(sync_point.deserialize_input_data(data))
+        in_data = sync_point.deserialize_input_data(data)
+        resource_data = node_data.load_resources_data(in_data if is_update
+                                                      else {})
 
         rsrc, rsrc_owning_stack, stack = check_resource.load_resource(
             cnxt, resource_id, resource_data, is_update)
@@ -175,7 +178,7 @@ class WorkerService(object):
             cr = check_resource.CheckResource(self.engine_id,
                                               self._rpc_client,
                                               self.thread_group_mgr,
-                                              msg_queue)
+                                              msg_queue, in_data)
             cr.check(cnxt, resource_id, current_traversal, resource_data,
                      is_update, adopt_stack_data, rsrc, stack)
         finally:
diff --git a/heat/tests/autoscaling/test_heat_scaling_policy.py b/heat/tests/autoscaling/test_heat_scaling_policy.py
index 13869b571d..422c06ae3b 100644
--- a/heat/tests/autoscaling/test_heat_scaling_policy.py
+++ b/heat/tests/autoscaling/test_heat_scaling_policy.py
@@ -19,6 +19,7 @@ import six
 
 from heat.common import exception
 from heat.common import template_format
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine import scheduler
 from heat.tests.autoscaling import inline_templates
@@ -146,13 +147,13 @@ class TestAutoScalingPolicy(common.HeatTestCase):
 
     def test_scaling_policy_refid_convg_cache_data(self):
         t = template_format.parse(as_template)
-        cache_data = {'my-policy': {
+        cache_data = {'my-policy': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'convg_xyz'
-        }}
+        })}
         stack = utils.parse_stack(t, cache_data=cache_data)
         rsrc = stack['my-policy']
         self.assertEqual('convg_xyz', rsrc.FnGetRefId())
diff --git a/heat/tests/autoscaling/test_launch_config.py b/heat/tests/autoscaling/test_launch_config.py
index c0887fb3d0..1850b88635 100644
--- a/heat/tests/autoscaling/test_launch_config.py
+++ b/heat/tests/autoscaling/test_launch_config.py
@@ -18,6 +18,7 @@ from heat.common import exception
 from heat.common import short_id
 from heat.common import template_format
 from heat.engine.clients.os import nova
+from heat.engine import node_data
 from heat.engine import scheduler
 from heat.tests.autoscaling import inline_templates
 from heat.tests import common
@@ -60,13 +61,13 @@ class LaunchConfigurationTest(common.HeatTestCase):
 
     def test_launch_config_refid_convergence_cache_data(self):
         t = template_format.parse(inline_templates.as_template)
-        cache_data = {'LaunchConfig': {
+        cache_data = {'LaunchConfig': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'convg_xyz'
-        }}
+        })}
         stack = utils.parse_stack(t, params=inline_templates.as_params,
                                   cache_data=cache_data)
         rsrc = stack['LaunchConfig']
diff --git a/heat/tests/autoscaling/test_scaling_policy.py b/heat/tests/autoscaling/test_scaling_policy.py
index 6a839ba982..b943db0be3 100644
--- a/heat/tests/autoscaling/test_scaling_policy.py
+++ b/heat/tests/autoscaling/test_scaling_policy.py
@@ -19,6 +19,7 @@ import six
 
 from heat.common import exception
 from heat.common import template_format
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine.resources.aws.autoscaling import scaling_policy as aws_sp
 from heat.engine import scheduler
@@ -157,13 +158,13 @@ class TestAutoScalingPolicy(common.HeatTestCase):
 
     def test_refid_convergence_cache_data(self):
         t = template_format.parse(as_template)
-        cache_data = {'WebServerScaleUpPolicy': {
+        cache_data = {'WebServerScaleUpPolicy': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'http://convg_signed_url'
-        }}
+        })}
         stack = utils.parse_stack(t, cache_data=cache_data)
         rsrc = stack['WebServerScaleUpPolicy']
         self.assertEqual('http://convg_signed_url', rsrc.FnGetRefId())
diff --git a/heat/tests/aws/test_eip.py b/heat/tests/aws/test_eip.py
index f3e1da5a21..a22f8a653f 100644
--- a/heat/tests/aws/test_eip.py
+++ b/heat/tests/aws/test_eip.py
@@ -23,6 +23,7 @@ from heat.common import exception
 from heat.common import short_id
 from heat.common import template_format
 from heat.engine.clients.os import nova
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine.resources.aws.ec2 import eip
 from heat.engine import rsrc_defn
@@ -396,12 +397,12 @@ class EIPTest(common.HeatTestCase):
         template = tmpl.Template(t)
         stack = parser.Stack(utils.dummy_context(), 'test', template,
                              cache_data={
-                                 'eip': {
+                                 'eip': node_data.NodeData.from_dict({
                                      'uuid': mock.ANY,
                                      'id': mock.ANY,
                                      'action': 'CREATE',
                                      'status': 'COMPLETE',
-                                     'reference_id': '1.1.1.1'}})
+                                     'reference_id': '1.1.1.1'})})
         rsrc = stack['eip']
         self.assertEqual('1.1.1.1', rsrc.FnGetRefId())
 
@@ -991,13 +992,13 @@ class AllocTest(common.HeatTestCase):
 
     def test_eip_allocation_refid_convergence_cache_data(self):
         t = template_format.parse(eip_template_ipassoc)
-        cache_data = {'IPAssoc': {
+        cache_data = {'IPAssoc': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'convg_xyz'
-        }}
+        })}
         stack = utils.parse_stack(t, cache_data=cache_data)
         rsrc = stack['IPAssoc']
         self.assertEqual('convg_xyz', rsrc.FnGetRefId())
diff --git a/heat/tests/aws/test_loadbalancer.py b/heat/tests/aws/test_loadbalancer.py
index 0b53a5ea66..d6c9162cc1 100644
--- a/heat/tests/aws/test_loadbalancer.py
+++ b/heat/tests/aws/test_loadbalancer.py
@@ -19,6 +19,7 @@ from oslo_config import cfg
 from heat.common import exception
 from heat.common import template_format
 from heat.engine.clients.os import nova
+from heat.engine import node_data
 from heat.engine.resources.aws.lb import loadbalancer as lb
 from heat.engine import rsrc_defn
 from heat.tests import common
@@ -177,13 +178,13 @@ class LoadBalancerTest(common.HeatTestCase):
         self.assertEqual('LoadBalancer', rsrc.FnGetRefId())
 
     def test_loadbalancer_refid_convergence_cache_data(self):
-        cache_data = {'LoadBalancer': {
+        cache_data = {'LoadBalancer': node_data.NodeData.from_dict({
            'uuid': mock.ANY,
            'id': mock.ANY,
            'action': 'CREATE',
            'status': 'COMPLETE',
            'reference_id': 'LoadBalancer_convg_mock'
-        }}
+        })}
         rsrc = self.setup_loadbalancer(cache_data=cache_data)
         self.assertEqual('LoadBalancer_convg_mock', rsrc.FnGetRefId())
 
diff --git a/heat/tests/aws/test_security_group.py b/heat/tests/aws/test_security_group.py
index 7b8fc3d0a4..a60f6b0495 100644
--- a/heat/tests/aws/test_security_group.py
+++ b/heat/tests/aws/test_security_group.py
@@ -24,6 +24,7 @@ from heat.common import exception
 from heat.common import short_id
 from heat.common import template_format
 from heat.engine.clients.os import nova
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine.resources.aws.ec2 import security_group
 from heat.engine import rsrc_defn
@@ -1132,13 +1133,13 @@ Resources:
 
     def test_security_group_refid_convg_cache_data(self):
         t = template_format.parse(self.test_template_nova)
-        cache_data = {'the_sg': {
+        cache_data = {'the_sg': node_data.NodeData.from_dict({
            'uuid': mock.ANY,
            'id': mock.ANY,
            'action': 'CREATE',
            'status': 'COMPLETE',
            'reference_id': 'convg_xyz'
-        }}
+        })}
         stack = utils.parse_stack(t, cache_data=cache_data)
         rsrc = stack['the_sg']
         self.assertEqual('convg_xyz', rsrc.FnGetRefId())
diff --git a/heat/tests/aws/test_user.py b/heat/tests/aws/test_user.py
index a592f8a655..aeda8be8b0 100644
--- a/heat/tests/aws/test_user.py
+++ b/heat/tests/aws/test_user.py
@@ -17,6 +17,7 @@ from oslo_config import cfg
 from heat.common import exception
 from heat.common import short_id
 from heat.common import template_format
+from heat.engine import node_data
 from heat.engine.resources.aws.iam import user
 from heat.engine.resources.openstack.heat import access_policy as ap
 from heat.engine import scheduler
@@ -282,13 +283,13 @@ class UserTest(common.HeatTestCase):
 
     def test_user_refid_convg_cache_data(self):
         t = template_format.parse(user_template)
-        cache_data = {'CfnUser': {
+        cache_data = {'CfnUser': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'convg_xyz'
-        }}
+        })}
         stack = utils.parse_stack(t, cache_data=cache_data)
         rsrc = stack['CfnUser']
         self.assertEqual('convg_xyz', rsrc.FnGetRefId())
diff --git a/heat/tests/aws/test_waitcondition.py b/heat/tests/aws/test_waitcondition.py
index 529817a570..cc9326a9e4 100644
--- a/heat/tests/aws/test_waitcondition.py
+++ b/heat/tests/aws/test_waitcondition.py
@@ -25,6 +25,7 @@ from heat.common import exception
 from heat.common import identifier
 from heat.common import template_format
 from heat.engine import environment
+from heat.engine import node_data
 from heat.engine.resources.aws.cfn import wait_condition_handle as aws_wch
 from heat.engine import rsrc_defn
 from heat.engine import scheduler
@@ -272,13 +273,13 @@ class WaitConditionTest(common.HeatTestCase):
         template = tmpl.Template(t)
         stack = parser.Stack(utils.dummy_context(), 'test', template,
                              cache_data={
-                                 'WaitHandle': {
+                                 'WaitHandle': node_data.NodeData.from_dict({
                                      'uuid': mock.ANY,
                                      'id': mock.ANY,
                                      'action': 'CREATE',
                                      'status': 'COMPLETE',
                                      'reference_id': 'http://convg_signed_url'
-                                 }})
+                                 })})
         rsrc = stack['WaitHandle']
         self.assertEqual('http://convg_signed_url', rsrc.FnGetRefId())
 
diff --git a/heat/tests/engine/test_check_resource.py b/heat/tests/engine/test_check_resource.py
index 5d8054df52..1635f1f3ae 100644
--- a/heat/tests/engine/test_check_resource.py
+++ b/heat/tests/engine/test_check_resource.py
@@ -52,7 +52,7 @@ class CheckWorkflowUpdateTest(common.HeatTestCase):
         self.cr = check_resource.CheckResource(self.worker.engine_id,
                                                self.worker._rpc_client,
                                                self.worker.thread_group_mgr,
-                                               mock.Mock())
+                                               mock.Mock(), {})
         self.worker._rpc_client = worker_client.WorkerClient()
         self.ctx = utils.dummy_context()
         self.stack = tools.get_stack(
diff --git a/heat/tests/engine/test_node_data.py b/heat/tests/engine/test_node_data.py
new file mode 100644
index 0000000000..c53345fed4
--- /dev/null
+++ b/heat/tests/engine/test_node_data.py
@@ -0,0 +1,80 @@
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+
+from heat.engine import node_data
+
+from heat.tests import common
+
+
+def make_test_data():
+    return {
+        'id': 42,
+        'name': 'foo',
+        'reference_id': 'foo-000000',
+        'attrs': {
+            'foo': 'bar',
+            ('foo', 'bar', 'baz'): 'quux',
+            ('blarg', 'wibble'): 'foo',
+        },
+        'action': 'CREATE',
+        'status': 'COMPLETE',
+        'uuid': '000000-0000-0000-0000000',
+    }
+
+
+def make_test_node():
+    return node_data.NodeData.from_dict(make_test_data())
+
+
+class NodeDataTest(common.HeatTestCase):
+    def test_round_trip(self):
+        in_dict = make_test_data()
+        self.assertEqual(in_dict,
+                         node_data.NodeData.from_dict(in_dict).as_dict())
+
+    def test_resource_key(self):
+        nd = make_test_node()
+        self.assertEqual(42, nd.primary_key)
+
+    def test_resource_name(self):
+        nd = make_test_node()
+        self.assertEqual('foo', nd.name)
+
+    def test_action(self):
+        nd = make_test_node()
+        self.assertEqual('CREATE', nd.action)
+
+    def test_status(self):
+        nd = make_test_node()
+        self.assertEqual('COMPLETE', nd.status)
+
+    def test_refid(self):
+        nd = make_test_node()
+        self.assertEqual('foo-000000', nd.reference_id())
+
+    def test_all_attrs(self):
+        nd = make_test_node()
+        self.assertEqual({'foo': 'bar'}, nd.attributes())
+
+    def test_attr(self):
+        nd = make_test_node()
+        self.assertEqual('bar', nd.attribute('foo'))
+
+    def test_path_attr(self):
+        nd = make_test_node()
+        self.assertEqual('quux', nd.attribute(('foo', 'bar', 'baz')))
+
+    def test_attr_names(self):
+        nd = make_test_node()
+        self.assertEqual({'foo', 'blarg'}, set(nd.attribute_names()))
diff --git a/heat/tests/openstack/heat/test_random_string.py b/heat/tests/openstack/heat/test_random_string.py
index 82b45c34aa..02707874e6 100644
--- a/heat/tests/openstack/heat/test_random_string.py
+++ b/heat/tests/openstack/heat/test_random_string.py
@@ -19,6 +19,7 @@ from testtools import matchers
 
 from heat.common import exception
 from heat.common import template_format
+from heat.engine import node_data
 from heat.engine import stack as parser
 from heat.engine import template
 from heat.tests import common
@@ -162,13 +163,13 @@ Resources:
 
     def test_random_string_refid_convergence_cache_data(self):
         t = template_format.parse(self.template_random_string)
-        cache_data = {'secret1': {
+        cache_data = {'secret1': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'xyz'
-        }}
+        })}
         stack = utils.parse_stack(t, cache_data=cache_data)
         rsrc = stack['secret1']
         self.assertEqual('xyz', rsrc.FnGetRefId())
diff --git a/heat/tests/openstack/heat/test_remote_stack.py b/heat/tests/openstack/heat/test_remote_stack.py
index 560379805d..f8a5530881 100644
--- a/heat/tests/openstack/heat/test_remote_stack.py
+++ b/heat/tests/openstack/heat/test_remote_stack.py
@@ -24,6 +24,7 @@ from heat.common.i18n import _
 from heat.common import template_format
 from heat.engine.clients.os import heat_plugin
 from heat.engine import environment
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine.resources.openstack.heat import remote_stack
 from heat.engine import rsrc_defn
@@ -668,13 +669,13 @@ class RemoteStackTest(tests_common.HeatTestCase):
 
     def test_remote_stack_refid_convergence_cache_data(self):
         t = template_format.parse(parent_stack_template)
-        cache_data = {'remote_stack': {
+        cache_data = {'remote_stack': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'convg_xyz'
-        }}
+        })}
         stack = utils.parse_stack(t, cache_data=cache_data)
         rsrc = stack['remote_stack']
         self.assertEqual('convg_xyz', rsrc.FnGetRefId())
diff --git a/heat/tests/openstack/heat/test_resource_chain.py b/heat/tests/openstack/heat/test_resource_chain.py
index e0a341c13f..63b55cff50 100644
--- a/heat/tests/openstack/heat/test_resource_chain.py
+++ b/heat/tests/openstack/heat/test_resource_chain.py
@@ -16,6 +16,7 @@ import mock
 
 from heat.common import exception
 from heat.common import grouputils
+from heat.engine import node_data
 from heat.engine.resources.openstack.heat import resource_chain
 from heat.engine import rsrc_defn
 from heat.tests import common
@@ -216,13 +217,13 @@ class ResourceChainTest(common.HeatTestCase):
         self.assertEqual(['0', '1'], rsrc.FnGetAtt(rsrc.REFS))
 
     def test_get_attribute_convg(self):
-        cache_data = {'test-chain': {
+        cache_data = {'test-chain': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
            'status': 'COMPLETE',
             'attrs': {'refs': ['rsrc1', 'rsrc2']}
-        }}
+        })}
         stack = utils.parse_stack(TEMPLATE, cache_data=cache_data)
         rsrc = stack['test-chain']
         self.assertEqual(['rsrc1', 'rsrc2'], rsrc.FnGetAtt(rsrc.REFS))
diff --git a/heat/tests/openstack/heat/test_resource_group.py b/heat/tests/openstack/heat/test_resource_group.py
index f0f2e32a44..7f5a6fb663 100644
--- a/heat/tests/openstack/heat/test_resource_group.py
+++ b/heat/tests/openstack/heat/test_resource_group.py
@@ -19,6 +19,7 @@ import six
 from heat.common import exception
 from heat.common import grouputils
 from heat.common import template_format
+from heat.engine import node_data
 from heat.engine.resources.openstack.heat import resource_group
 from heat.engine import rsrc_defn
 from heat.engine import scheduler
@@ -832,13 +833,13 @@ class ResourceGroupAttrTest(common.HeatTestCase):
         self.assertEqual(['0', '1'], rsrc.FnGetAtt(rsrc.REFS))
 
     def test_get_attribute_convg(self):
-        cache_data = {'group1': {
+        cache_data = {'group1': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'attrs': {'refs': ['rsrc1', 'rsrc2']}
-        }}
+        })}
         stack = utils.parse_stack(template, cache_data=cache_data)
         rsrc = stack['group1']
         self.assertEqual(['rsrc1', 'rsrc2'], rsrc.FnGetAtt(rsrc.REFS))
diff --git a/heat/tests/openstack/heat/test_software_deployment.py b/heat/tests/openstack/heat/test_software_deployment.py
index 5d5de17aa0..89245274d0 100644
--- a/heat/tests/openstack/heat/test_software_deployment.py
+++ b/heat/tests/openstack/heat/test_software_deployment.py
@@ -26,6 +26,7 @@ from heat.common import template_format
 from heat.engine.clients.os import nova
 from heat.engine.clients.os import swift
 from heat.engine.clients.os import zaqar
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine.resources.openstack.heat import software_deployment as sd
 from heat.engine import rsrc_defn
@@ -1085,13 +1086,13 @@ class SoftwareDeploymentTest(common.HeatTestCase):
         self.assertEqual(0, self.deployment.FnGetAtt('deploy_status_code'))
 
     def test_fn_get_att_convg(self):
-        cache_data = {'deployment_mysql': {
+        cache_data = {'deployment_mysql': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'attrs': {'foo': 'bar'}
-        }}
+        })}
         self._create_stack(self.template, cache_data=cache_data)
         self.assertEqual('bar', self.deployment.FnGetAtt('foo'))
 
diff --git a/heat/tests/openstack/heat/test_swiftsignal.py b/heat/tests/openstack/heat/test_swiftsignal.py
index 0dc57451ad..fb4b1c59de 100644
--- a/heat/tests/openstack/heat/test_swiftsignal.py
+++ b/heat/tests/openstack/heat/test_swiftsignal.py
@@ -25,6 +25,7 @@ from testtools import matchers
 from heat.common import exception
 from heat.common import template_format
 from heat.engine.clients.os import swift
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine import rsrc_defn
 from heat.engine import scheduler
@@ -280,13 +281,15 @@ class SwiftSignalHandleTest(common.HeatTestCase):
         self.assertEqual(old_url, rsrc.FnGetRefId())
 
     def test_swift_handle_refid_convergence_cache_data(self):
-        cache_data = {'test_wait_condition_handle': {
-            'uuid': mock.ANY,
-            'id': mock.ANY,
-            'action': 'CREATE',
-            'status': 'COMPLETE',
-            'reference_id': 'convg_xyz'
-        }}
+        cache_data = {
+            'test_wait_condition_handle': node_data.NodeData.from_dict({
+                'uuid': mock.ANY,
+                'id': mock.ANY,
+                'action': 'CREATE',
+                'status': 'COMPLETE',
+                'reference_id': 'convg_xyz'
+            })
+        }
         st = create_stack(swiftsignalhandle_template, cache_data=cache_data)
         rsrc = st['test_wait_condition_handle']
         self.assertEqual('convg_xyz', rsrc.FnGetRefId())
diff --git a/heat/tests/openstack/mistral/test_workflow.py b/heat/tests/openstack/mistral/test_workflow.py
index 836791946d..87b9015b36 100644
--- a/heat/tests/openstack/mistral/test_workflow.py
+++ b/heat/tests/openstack/mistral/test_workflow.py
@@ -21,6 +21,7 @@ from oslo_serialization import jsonutils
 from heat.common import exception
 from heat.common import template_format
 from heat.engine.clients.os import mistral as client
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine.resources.openstack.mistral import workflow
 from heat.engine.resources import signal_responder
@@ -818,13 +819,13 @@ class TestMistralWorkflow(common.HeatTestCase):
 
     def test_mistal_workflow_refid_convergence_cache_data(self):
         tmpl = template_format.parse(workflow_template)
-        cache_data = {'workflow': {
+        cache_data = {'workflow': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'convg_xyz'
-        }}
+        })}
         stack = utils.parse_stack(tmpl, stack_name='test',
                                   cache_data=cache_data)
         rsrc = stack['workflow']
diff --git a/heat/tests/openstack/neutron/test_neutron_floating_ip.py b/heat/tests/openstack/neutron/test_neutron_floating_ip.py
index 84e08343a0..589d19f2d9 100644
--- a/heat/tests/openstack/neutron/test_neutron_floating_ip.py
+++ b/heat/tests/openstack/neutron/test_neutron_floating_ip.py
@@ -24,6 +24,7 @@ from heat.common import template_format
 from heat.common import timeutils
 from heat.engine.clients.os import neutron
 from heat.engine.hot import functions as hot_funcs
+from heat.engine import node_data
 from heat.engine import rsrc_defn
 from heat.engine import scheduler
 from heat.engine import stack as parser
@@ -287,12 +288,12 @@ class NeutronFloatingIPTest(common.HeatTestCase):
         template = tmpl.Template(t)
         stack = parser.Stack(utils.dummy_context(), 'test', template,
                              cache_data={
-                                 'floating_ip': {
+                                 'floating_ip': node_data.NodeData.from_dict({
                                      'uuid': mock.ANY,
                                      'id': mock.ANY,
                                      'action': 'CREATE',
                                      'status': 'COMPLETE',
-                                     'reference_id': 'abc'}})
+                                     'reference_id': 'abc'})})
         rsrc = stack['floating_ip']
         self.assertEqual('abc', rsrc.FnGetRefId())
 
diff --git a/heat/tests/openstack/nova/test_floatingip.py b/heat/tests/openstack/nova/test_floatingip.py
index 2743e1db2b..3ac55cd365 100644
--- a/heat/tests/openstack/nova/test_floatingip.py
+++ b/heat/tests/openstack/nova/test_floatingip.py
@@ -20,6 +20,7 @@ from heat.common import exception as heat_ex
 from heat.common import short_id
 from heat.common import template_format
 from heat.engine.clients.os import nova
+from heat.engine import node_data
 from heat.engine.resources.openstack.nova import floatingip
 from heat.engine import rsrc_defn
 from heat.engine import scheduler
@@ -381,13 +382,13 @@ class NovaFloatingIPTest(common.HeatTestCase):
 
     def test_floating_ip_assoc_refid_convg_cache_data(self):
         t = template_format.parse(floating_ip_template_with_assoc)
-        cache_data = {'MyFloatingIPAssociation': {
+        cache_data = {'MyFloatingIPAssociation': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'convg_xyz'
-        }}
+        })}
         stack = utils.parse_stack(t, cache_data=cache_data)
         rsrc = stack['MyFloatingIPAssociation']
         self.assertEqual('convg_xyz', rsrc.FnGetRefId())
diff --git a/heat/tests/openstack/swift/test_container.py b/heat/tests/openstack/swift/test_container.py
index 9a7eb3d1a6..cca91fce07 100644
--- a/heat/tests/openstack/swift/test_container.py
+++ b/heat/tests/openstack/swift/test_container.py
@@ -18,6 +18,7 @@ import swiftclient.client as sc
 
 from heat.common import exception
 from heat.common import template_format
+from heat.engine import node_data
 from heat.engine.resources.openstack.swift import container as swift_c
 from heat.engine import scheduler
 from heat.tests import common
@@ -458,13 +459,13 @@ class SwiftTest(common.HeatTestCase):
         self.assertEqual('xyz', rsrc.FnGetRefId())
 
     def test_refid_convergence_cache_data(self):
-        cache_data = {'SwiftContainer': {
+        cache_data = {'SwiftContainer': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'xyz_convg'
-        }}
+        })}
         stack = utils.parse_stack(self.t, cache_data=cache_data)
         rsrc = stack['SwiftContainer']
         self.assertEqual('xyz_convg', rsrc.FnGetRefId())
diff --git a/heat/tests/test_hot.py b/heat/tests/test_hot.py
index 43057acada..85a192ec44 100644
--- a/heat/tests/test_hot.py
+++ b/heat/tests/test_hot.py
@@ -27,6 +27,7 @@ from heat.engine import function
 from heat.engine.hot import functions as hot_functions
 from heat.engine.hot import parameters as hot_param
 from heat.engine.hot import template as hot_template
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine import resources
 from heat.engine import rsrc_defn
@@ -2353,9 +2354,9 @@ class StackGetAttributesTestConvergence(common.HeatTestCase):
             self.resource_name)
         # store as cache data
         self.stack.cache_data = {
-            rsrc.name: {
+            rsrc.name: node_data.NodeData.from_dict({
                 'attrs': cr._resolve_attributes(attributes, rsrc)
-            }
+            })
         }
 
     def test_get_attr_convergence(self):
diff --git a/heat/tests/test_nested_stack.py b/heat/tests/test_nested_stack.py
index 503b9a72c9..a092f25770 100644
--- a/heat/tests/test_nested_stack.py
+++ b/heat/tests/test_nested_stack.py
@@ -23,6 +23,7 @@ from heat.common import identifier
 from heat.common import template_format
 from heat.common import urlfetch
 from heat.engine import api
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine.resources.aws.cfn import stack as stack_res
 from heat.engine import rsrc_defn
@@ -276,13 +277,13 @@ Resources:
         t = template_format.parse(self.test_template)
         tmpl = template.Template(t)
         ctx = utils.dummy_context()
-        cache_data = {'the_nested': {
+        cache_data = {'the_nested': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'reference_id': 'the_nested_convg_mock'
-        }}
+        })}
         stack = parser.Stack(ctx, 'test_stack', tmpl, cache_data=cache_data)
         nested_stack = stack['the_nested']
         self.assertEqual('the_nested_convg_mock', nested_stack.FnGetRefId())
diff --git a/heat/tests/test_resource.py b/heat/tests/test_resource.py
index 9b7cab90bd..515bbe8768 100644
--- a/heat/tests/test_resource.py
+++ b/heat/tests/test_resource.py
@@ -36,6 +36,7 @@ from heat.engine import clients
 from heat.engine import constraints
 from heat.engine import dependencies
 from heat.engine import environment
+from heat.engine import node_data
 from heat.engine import plugin_manager
 from heat.engine import properties
 from heat.engine import resource
@@ -1811,12 +1812,13 @@ class ResourceTest(common.HeatTestCase):
         })
         stack = parser.Stack(utils.dummy_context(), 'test', tmpl,
                              cache_data={
-                                 'res': {'attrs': {'Foo': 'Res',
-                                                   'foo': 'res'},
-                                         'uuid': mock.ANY,
-                                         'id': mock.ANY,
-                                         'action': 'CREATE',
-                                         'status': 'COMPLETE'}})
+                                 'res': node_data.NodeData.from_dict({
+                                     'attrs': {'Foo': 'Res',
+                                               'foo': 'res'},
+                                     'uuid': mock.ANY,
+                                     'id': mock.ANY,
+                                     'action': 'CREATE',
+                                     'status': 'COMPLETE'})})
         res = stack['res']
 
         self.assertEqual('Res', res.FnGetAtt('Foo'))
@@ -1833,12 +1835,12 @@ class ResourceTest(common.HeatTestCase):
         })
         stack = parser.Stack(utils.dummy_context(), 'test', tmpl,
                              cache_data={
-                                 'res': {
+                                 'res': node_data.NodeData.from_dict({
                                      'attrs': {('nested', 'string'): 'abc'},
                                      'uuid': mock.ANY,
                                      'id': mock.ANY,
                                      'action': 'CREATE',
-                                     'status': 'COMPLETE'}})
+                                     'status': 'COMPLETE'})})
         res = stack['res']
 
         self.assertEqual('abc', res.FnGetAtt('nested', 'string'))
@@ -1874,12 +1876,13 @@ class ResourceTest(common.HeatTestCase):
         })
         stack = parser.Stack(utils.dummy_context(), 'test', tmpl,
                              cache_data={
-                                 'res': {'attrs': {'Foo': 'res',
-                                                   'foo': 'res'},
-                                         'uuid': mock.ANY,
-                                         'id': mock.ANY,
-                                         'action': 'CREATE',
-                                         'status': 'COMPLETE'}})
+                                 'res': node_data.NodeData.from_dict({
+                                     'attrs': {'Foo': 'res',
+                                               'foo': 'res'},
+                                     'uuid': mock.ANY,
+                                     'id': mock.ANY,
+                                     'action': 'CREATE',
+                                     'status': 'COMPLETE'})})
         res = stack['res']
 
         self.assertEqual({'foo': 'res', 'Foo': 'res'}, res.FnGetAtts())
@@ -1994,6 +1997,7 @@ class ResourceTest(common.HeatTestCase):
         self._assert_resource_lock(res.id, None, None)
         res_data = {(1, True): {u'id': 1, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
         pcb = mock.Mock()
 
         with mock.patch.object(resource.Resource, 'create') as mock_create:
@@ -2011,6 +2015,7 @@ class ResourceTest(common.HeatTestCase):
         res.store()
         res_data = {(1, True): {u'id': 1, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
        pcb = mock.Mock()
 
         self.assertRaises(scheduler.Timeout, res.create_convergence,
@@ -2030,6 +2035,7 @@ class ResourceTest(common.HeatTestCase):
         self._assert_resource_lock(res.id, None, None)
         res_data = {(1, True): {u'id': 5, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
         self.assertRaises(exception.ResourceNotAvailable,
                           res.create_convergence, self.stack.t.id, res_data,
                           'engine-007', self.dummy_timeout, self.dummy_event)
@@ -2047,6 +2053,7 @@ class ResourceTest(common.HeatTestCase):
         self._assert_resource_lock(res.id, None, None)
         res_data = {(1, True): {u'id': 5, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
         tr = scheduler.TaskRunner(res.create_convergence, self.stack.t.id,
                                   res_data, 'engine-007',
                                   self.dummy_timeout, self.dummy_event)
@@ -2065,6 +2072,7 @@ class ResourceTest(common.HeatTestCase):
         self._assert_resource_lock(res.id, None, None)
         res_data = {(1, True): {u'id': 5, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
         tr = scheduler.TaskRunner(res.create_convergence, self.stack.t.id,
                                   res_data, 'engine-007',
                                   self.dummy_timeout, self.dummy_event)
@@ -2098,6 +2106,7 @@ class ResourceTest(common.HeatTestCase):
 
         res_data = {(1, True): {u'id': 4, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
         pcb = mock.Mock()
         with mock.patch.object(resource.Resource, 'update') as mock_update:
             tr = scheduler.TaskRunner(res.update_convergence, new_temp.id,
@@ -2192,6 +2201,7 @@ class ResourceTest(common.HeatTestCase):
 
         res_data = {(1, True): {u'id': 4, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
         tr = scheduler.TaskRunner(res.update_convergence, 'template_key',
                                   res_data, 'engine-007', self.dummy_timeout,
                                   mock.ANY, self.dummy_event)
@@ -2227,6 +2237,7 @@ class ResourceTest(common.HeatTestCase):
 
         res_data = {(1, True): {u'id': 4, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
         exc = Exception(_('Resource update failed'))
         new_stack = parser.Stack(utils.dummy_context(), 'test_stack',
                                  new_temp, stack_id=self.stack.id)
@@ -2270,6 +2281,7 @@ class ResourceTest(common.HeatTestCase):
 
         res_data = {(1, True): {u'id': 4, u'name': 'A', 'attrs': {}},
                     (2, True): {u'id': 3, u'name': 'B', 'attrs': {}}}
+        res_data = node_data.load_resources_data(res_data)
         mock_update.side_effect = resource.UpdateReplace
         new_stack = parser.Stack(utils.dummy_context(), 'test_stack',
                                  new_temp, stack_id=self.stack.id)
diff --git a/heat/tests/test_stack.py b/heat/tests/test_stack.py
index 59f2db9ede..950ed68857 100644
--- a/heat/tests/test_stack.py
+++ b/heat/tests/test_stack.py
@@ -34,6 +34,7 @@ from heat.engine.clients.os import keystone
 from heat.engine.clients.os import nova
 from heat.engine import environment
 from heat.engine import function
+from heat.engine import node_data
 from heat.engine import output
 from heat.engine import resource
 from heat.engine import scheduler
@@ -2354,13 +2355,15 @@ class StackTest(common.HeatTestCase):
             }
         })
-        cache_data = {'foo': {'reference_id': 'foo-id',
+        rsrcs_data = {'foo': {'reference_id': 'foo-id',
                               'attrs': {'bar': 'baz'},
                               'uuid': mock.ANY, 'id': mock.ANY,
                               'action': 'CREATE',
                               'status': 'COMPLETE'},
                       'bar': {'reference_id': 'bar-id',
                               'uuid': mock.ANY, 'id': mock.ANY,
                               'action': 'CREATE', 'status': 'COMPLETE'}}
+        cache_data = {n: node_data.NodeData.from_dict(d)
+                      for n, d in rsrcs_data.items()}
         tmpl_stack = stack.Stack(self.ctx, 'test', tmpl)
         tmpl_stack.store()
         lightweight_stack = stack.Stack.load(self.ctx, stack_id=tmpl_stack.id,
@@ -2393,12 +2396,14 @@ class StackTest(common.HeatTestCase):
             }
         })
-        cache_data = {'foo': {'reference_id': 'physical-resource-id',
+        rsrcs_data = {'foo': {'reference_id': 'physical-resource-id',
                               'uuid': mock.ANY, 'id': mock.ANY,
                               'action': 'CREATE', 'status': 'COMPLETE'},
                      'bar': {'reference_id': 'bar-id',
                               'uuid': mock.ANY, 'id': mock.ANY,
                               'action': 'CREATE',
                               'status': 'COMPLETE'}}
+        cache_data = {n: node_data.NodeData.from_dict(d)
+                      for n, d in rsrcs_data.items()}
         tmpl_stack = stack.Stack(self.ctx, 'test', tmpl)
         tmpl_stack.store()
         lightweight_stack = stack.Stack.load(self.ctx, stack_id=tmpl_stack.id,
diff --git a/heat/tests/test_stack_resource.py b/heat/tests/test_stack_resource.py
index cb194720ab..0ddec04afc 100644
--- a/heat/tests/test_stack_resource.py
+++ b/heat/tests/test_stack_resource.py
@@ -23,6 +23,7 @@ import six
 from heat.common import exception
 from heat.common import identifier
 from heat.common import template_format
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine.resources import stack_resource
 from heat.engine import stack as parser
@@ -448,13 +449,13 @@ class StackResourceTest(StackResourceBaseTest):
     def test_get_attribute_autoscaling_convg(self):
         t = template_format.parse(heat_autoscaling_group_template)
         tmpl = templatem.Template(t)
-        cache_data = {'my_autoscaling_group': {
+        cache_data = {'my_autoscaling_group': node_data.NodeData.from_dict({
             'uuid': mock.ANY,
             'id': mock.ANY,
             'action': 'CREATE',
             'status': 'COMPLETE',
             'attrs': {'current_size': 4}
-        }}
+        })}
         stack = parser.Stack(utils.dummy_context(), 'test_att', tmpl,
                              cache_data=cache_data)
         rsrc = stack['my_autoscaling_group']
diff --git a/heat/tests/utils.py b/heat/tests/utils.py
index acccc73d7b..511a314663 100644
--- a/heat/tests/utils.py
+++ b/heat/tests/utils.py
@@ -25,6 +25,7 @@ from heat.common import context
 from heat.db.sqlalchemy import api as db_api
 from heat.db.sqlalchemy import models
 from heat.engine import environment
+from heat.engine import node_data
 from heat.engine import resource
 from heat.engine import stack
 from heat.engine import template
@@ -99,6 +100,9 @@ def parse_stack(t, params=None, files=None, stack_name=None,
     templ.store(ctx)
     if stack_name is None:
         stack_name = random_name()
+    if cache_data is not None:
+        cache_data = {n: node_data.NodeData.from_dict(d)
+                      for n, d in cache_data.items()}
     stk = stack.Stack(ctx, stack_name, templ, stack_id=stack_id,
                       timeout_mins=timeout_mins, cache_data=cache_data)
     stk.store()
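
Reviewer note (not part of the patch): the sketch below shows how the serialised SyncPoint payload round-trips through the NodeData API introduced above. It assumes it runs inside a Heat tree so that heat.engine.node_data is importable; the resource name, attribute key, and values are invented for illustration only.

# Sketch of the new data flow, assuming a Heat source tree on sys.path.
from heat.engine import node_data

# What construct_input_data() now produces per resource (the as_dict() form),
# keyed by resource primary key, as stored in a SyncPoint's input_data.
serialized = {
    1: node_data.NodeData(1, 'server', 'aaaa-1111', 'server-refid',
                          {'first_address': '10.0.0.4'},
                          'CREATE', 'COMPLETE').as_dict(),
    2: None,  # a predecessor that contributed no data is skipped
}

# What the worker now does with the deserialised payload: resource names
# become the keys and the values are NodeData objects, which the stack's
# cache_data_* accessors read via reference_id()/attribute().
resources = node_data.load_resources_data(serialized)
assert resources['server'].reference_id() == 'server-refid'
assert resources['server'].attribute('first_address') == '10.0.0.4'
assert resources['server'].primary_key == 1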