Only use parameter_defaults for scaling down

We were using parameters for scaling down but parameter_defaults for
scaling up. The old parameters from scale-down still persisted in the
stack and took precedence over the scaled-up parameter_defaults, so the
scaled-up values didn't propagate.

We already update parameter_defaults in the deployment plan in that same
method, so all we need to do is remove the 'parameters' part.

Change-Id: I90a195a66b15c7c9057bfb415b6a730bbd9a939c
Closes-Bug: #1641142
Jiri Stransky 2016-11-11 16:46:28 +01:00
parent 51086a645c
commit fbc435ce16
2 changed files with 1 addition and 12 deletions
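
As a rough conceptual sketch of the precedence problem described in the commit message (illustrative Python, not Heat's actual parameter resolution code; the scaled-up value '3' is made up for the example): a parameter already stored on the stack wins over a parameter_defaults entry from the environment, so a stale value set through 'parameters' during scale-down masks the new default supplied later.

    def effective_value(name, stack_parameters, parameter_defaults,
                        template_default=None):
        # Conceptual precedence only: an explicit stack parameter (e.g. one
        # left over from an earlier scale-down) beats an environment
        # parameter_default, which in turn beats the template's own default.
        if name in stack_parameters:
            return stack_parameters[name]
        if name in parameter_defaults:
            return parameter_defaults[name]
        return template_default

    # A stale 'ComputeCount' set via 'parameters' on scale-down masks a value
    # later supplied only via parameter_defaults:
    assert effective_value('ComputeCount',
                           stack_parameters={'ComputeCount': '0'},
                           parameter_defaults={'ComputeCount': '3'}) == '0'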

@@ -83,11 +83,6 @@ class ScaleDownAction(templates.ProcessTemplatesAction):
         fields = processed_data.copy()
         fields['timeout_mins'] = timeout_mins
         fields['existing'] = True
-        # TODO(rbrady): test against live overcloud to see if it is required
-        # to pass 'parameters' here to the stack update, or will storing the
-        # updated values in the plan and send them merged into the environment
-        # work as well
-        fields['parameters'] = parameters
         LOG.debug('stack update params: %s', fields)
         self._get_orchestration_client().stacks.update(self.container,
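
After this change, the scale-down update call roughly takes the shape sketched below; the helper name and arguments are illustrative, not the actual tripleo-common code.

    def update_stack_for_scale_down(heat, container, template, env, files,
                                    timeout_mins=240):
        # No explicit 'parameters' kwarg any more: the scaled-down counts and
        # removal policies travel in env['parameter_defaults'], which the same
        # method already writes into the deployment plan.
        heat.stacks.update(container,
                           template=template,
                           environment=env,
                           files=files,
                           existing=True,
                           timeout_mins=timeout_mins)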

@@ -119,10 +119,4 @@ class ScaleDownActionTest(base.TestCase):
             environment=env,
             existing=True,
             files={},
-            timeout_mins=240,
-            parameters={
-                'ComputeCount': '0',
-                'ComputeRemovalPolicies': [
-                    {'resource_list': ['node0']}
-                ]
-            })
+            timeout_mins=240)
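
For reference, the values the test no longer expects as explicit stack 'parameters' would instead live in the plan environment's parameter_defaults that gets merged into the update; a hypothetical illustration of that structure:

    plan_environment = {
        'parameter_defaults': {
            'ComputeCount': '0',
            'ComputeRemovalPolicies': [
                {'resource_list': ['node0']},
            ],
        },
    }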