Improve resource.NoActionRequired

Log the proper reason (scaling activity in progress or cooldown)
when scaling is not allowed.

Change-Id: I3a9ed8fe7bff8696551470a9fac0b1a2831eb95c
huangtianhua 2017-01-16 16:07:15 +08:00
parent a27b6cc1e4
commit 197febdb01
8 changed files with 84 additions and 76 deletions
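
For context, this is roughly what a caller sees once NoActionRequired carries a reason (a minimal sketch assuming a Heat development environment with this change applied; the resource name and cooldown value are made up):

    from heat.engine import resource

    try:
        # e.g. raised by CooldownMixin._check_scaling_allowed(), see below
        raise resource.NoActionRequired(res_name='my-group',
                                        reason='due to cooldown, cooldown 60')
    except resource.NoActionRequired as ex:
        # Prints: The resource my-group could not perform scaling action:
        #         due to cooldown, cooldown 60
        print(ex)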


@@ -71,7 +71,11 @@ class NoActionRequired(Exception):
Resource subclasses should raise this exception from handle_signal() to
suppress recording of an event corresponding to the signal.
"""
pass
def __init__(self, res_name='Unknown', reason=''):
msg = (_("The resource %(res)s could not perform "
"scaling action: %(reason)s") %
{'res': res_name, 'reason': reason})
super(Exception, self).__init__(six.text_type(msg))
class PollDelay(Exception):
@@ -2162,7 +2166,8 @@ class Resource(object):
pass
except Exception as ex:
LOG.info(_LI('signal %(name)s : %(msg)s'),
{'name': six.text_type(self), 'msg': ex},
{'name': six.text_type(self),
'msg': six.text_type(ex)},
exc_info=True)
failure = exception.ResourceFailure(ex, self)
raise failure
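
As the docstring above notes, resource plugins raise NoActionRequired from handle_signal() to suppress the signal event; with this change a subclass can pass a reason along. A hypothetical plugin might look like this (sketch only; MyScalingResource and its helpers are made up):

    from heat.engine import resource


    class MyScalingResource(resource.Resource):
        """Hypothetical plugin, for illustration only."""

        def handle_signal(self, details=None):
            if self._in_cooldown():  # made-up helper
                # Resource.signal() catches NoActionRequired and suppresses
                # the signal event instead of recording it.
                raise resource.NoActionRequired(res_name=self.name,
                                                reason='due to cooldown')
            return self._apply_adjustment(details)  # made-up helper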


@@ -295,12 +295,7 @@ class AutoScalingGroup(instgrp.InstanceGroup, cooldown.CooldownMixin):
"as there is no change in capacity.") % self.name)
raise resource.NoActionRequired
if not self._is_scaling_allowed():
LOG.info(_LI("%(name)s NOT performing scaling adjustment, "
"cooldown %(cooldown)s") % {
'name': self.name,
'cooldown': self.properties[self.COOLDOWN]})
raise resource.NoActionRequired
self._check_scaling_allowed()
# send a notification before, on-error and on-success.
notif = {


@@ -174,12 +174,7 @@ class AutoScalingPolicy(signal_responder.SignalResponder,
) % {'alarm': self.name,
'group': asgn_id})
if not self._is_scaling_allowed():
LOG.info(_LI("%(name)s NOT performing scaling action, "
"cooldown %(cooldown)s") % {
'name': self.name,
'cooldown': self.properties[self.COOLDOWN]})
raise resource.NoActionRequired
self._check_scaling_allowed()
LOG.info(_LI('%(name)s alarm, adjusting group %(group)s with id '
'%(asgn_id)s by %(filter)s') % {


@@ -11,10 +11,17 @@
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from heat.common import exception
from heat.common.i18n import _
from heat.common.i18n import _LI
from heat.engine import resource
from oslo_utils import timeutils
import six
LOG = logging.getLogger(__name__)
class CooldownMixin(object):
"""Utility class to encapsulate Cooldown related logic.
@@ -23,10 +30,14 @@ class CooldownMixin(object):
This logic includes both cooldown timestamp comparing and scaling in
progress checking.
"""
def _is_scaling_allowed(self):
def _check_scaling_allowed(self):
metadata = self.metadata_get()
if metadata.get('scaling_in_progress'):
return False
LOG.info(_LI("Can not perform scaling action: resource %s "
"is already in scaling.") % self.name)
reason = _('due to scaling activity')
raise resource.NoActionRequired(res_name=self.name,
reason=reason)
try:
# Negative values don't make sense, so they are clamped to zero
cooldown = max(0, self.properties[self.COOLDOWN])
@@ -40,12 +51,10 @@ class CooldownMixin(object):
# Note: this is for supporting old version cooldown logic
if metadata:
last_adjust = next(six.iterkeys(metadata))
if not timeutils.is_older_than(last_adjust, cooldown):
return False
self._cooldown_check(cooldown, last_adjust)
else:
last_adjust = next(six.iterkeys(metadata['cooldown']))
if not timeutils.is_older_than(last_adjust, cooldown):
return False
self._cooldown_check(cooldown, last_adjust)
except ValueError:
# occurs when metadata has only {scaling_in_progress: False}
pass
@@ -54,7 +63,17 @@ class CooldownMixin(object):
# after the scaling operation completes
metadata['scaling_in_progress'] = True
self.metadata_set(metadata)
return True
def _cooldown_check(self, cooldown, last_adjust):
if not timeutils.is_older_than(last_adjust, cooldown):
LOG.info(_LI("Can not perform scaling action: "
"resource %(name)s is in cooldown (%(cooldown)s).") %
{'name': self.name,
'cooldown': cooldown})
reason = _('due to cooldown, '
'cooldown %s') % cooldown
raise resource.NoActionRequired(
res_name=self.name, reason=reason)
def _finished_scaling(self, cooldown_reason, size_changed=True):
# If we wanted to implement the AutoScaling API like AWS does,
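
For readers skimming the mixin change, the metadata states that the renamed _check_scaling_allowed() distinguishes look roughly like this (shapes taken from the tests below; the timestamp is invented):

    # 1. A scaling operation is still in progress -> NoActionRequired is
    #    raised with reason 'due to scaling activity'.
    {'scaling_in_progress': True}

    # 2. The last adjustment is newer than the cooldown window ->
    #    NoActionRequired is raised with 'due to cooldown, cooldown <seconds>'.
    {'cooldown': {'2017-01-16T08:07:15': 'change_in_capacity : 1'},
     'scaling_in_progress': False}

    # 3. Cooldown expired, or COOLDOWN is 0/None -> the check returns None
    #    and flags 'scaling_in_progress': True for the duration of the
    #    operation. The old flat {timestamp: reason} format is still
    #    honoured via the "old version cooldown logic" branch above.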


@@ -110,10 +110,9 @@ class TestGroupAdjust(common.HeatTestCase):
self.assertIsNone(self.group.validate())
def test_scaling_policy_cooldown_toosoon(self):
"""If _is_scaling_allowed() returns False don't progress."""
dont_call = self.patchobject(self.group, 'resize')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=False)
self.patchobject(self.group, '_check_scaling_allowed',
side_effect=resource.NoActionRequired)
self.assertRaises(resource.NoActionRequired,
self.group.adjust, 1)
self.assertEqual([], dont_call.call_args_list)
@@ -137,8 +136,7 @@ class TestGroupAdjust(common.HeatTestCase):
resize = self.patchobject(self.group, 'resize')
finished_scaling = self.patchobject(self.group, '_finished_scaling')
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.group.adjust(33, adjustment_type='PercentChangeInCapacity',
min_adjustment_step=2)
@@ -169,8 +167,7 @@ class TestGroupAdjust(common.HeatTestCase):
resize = self.patchobject(self.group, 'resize')
finished_scaling = self.patchobject(self.group, '_finished_scaling')
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.group.adjust(-33, adjustment_type='PercentChangeInCapacity',
min_adjustment_step=2)
@@ -201,8 +198,7 @@ class TestGroupAdjust(common.HeatTestCase):
resize = self.patchobject(self.group, 'resize')
finished_scaling = self.patchobject(self.group, '_finished_scaling')
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.group.adjust(1)
expected_notifies = [
@@ -231,8 +227,7 @@ class TestGroupAdjust(common.HeatTestCase):
self.patchobject(self.group, 'resize',
side_effect=ValueError('test error'))
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.patchobject(self.group, '_finished_scaling')
self.assertRaises(ValueError, self.group.adjust, 1)
@@ -261,8 +256,7 @@ class TestGroupAdjust(common.HeatTestCase):
self.patchobject(self.group, 'resize',
side_effect=ValueError('test error'))
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.patchobject(self.group, '_finished_scaling')
self.assertRaises(ValueError, self.group.adjust,


@@ -84,8 +84,8 @@ class TestAutoScalingPolicy(common.HeatTestCase):
self.patchobject(group, 'adjust',
side_effect=resource.NoActionRequired())
mock_fin_scaling = self.patchobject(up_policy, '_finished_scaling')
with mock.patch.object(up_policy, '_is_scaling_allowed',
return_value=True) as mock_isa:
with mock.patch.object(up_policy,
'_check_scaling_allowed') as mock_isa:
self.assertRaises(resource.NoActionRequired,
up_policy.handle_signal)
mock_isa.assert_called_once_with()
@@ -100,15 +100,14 @@ class TestAutoScalingPolicy(common.HeatTestCase):
group = stack['my-group']
self.patchobject(group, 'adjust')
mock_fin_scaling = self.patchobject(up_policy, '_finished_scaling')
with mock.patch.object(up_policy, '_is_scaling_allowed',
return_value=True) as mock_isa:
with mock.patch.object(up_policy,
'_check_scaling_allowed') as mock_isa:
self.assertIsNone(up_policy.handle_signal())
mock_isa.assert_called_once_with()
mock_fin_scaling.assert_called_once_with('change_in_capacity : 1',
size_changed=True)
def test_scaling_policy_cooldown_toosoon(self):
"""If _is_scaling_allowed() returns False don't progress."""
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=as_params)
pol = self.create_scaling_policy(t, stack, 'my-policy')
@@ -117,8 +116,9 @@ class TestAutoScalingPolicy(common.HeatTestCase):
with mock.patch.object(group, 'adjust',
side_effect=AssertionError) as dont_call:
with mock.patch.object(pol, '_is_scaling_allowed',
return_value=False) as mock_cip:
with mock.patch.object(
pol, '_check_scaling_allowed',
side_effect=resource.NoActionRequired) as mock_cip:
self.assertRaises(resource.NoActionRequired,
pol.handle_signal, details=test)
mock_cip.assert_called_once_with()
@@ -132,8 +132,7 @@ class TestAutoScalingPolicy(common.HeatTestCase):
group = self.patchobject(pol.stack, 'resource_by_refid').return_value
group.name = 'fluffy'
with mock.patch.object(pol, '_is_scaling_allowed',
return_value=True) as mock_isa:
with mock.patch.object(pol, '_check_scaling_allowed') as mock_isa:
pol.handle_signal(details=test)
mock_isa.assert_called_once_with()
group.adjust.assert_called_once_with(1, 'change_in_capacity', None)
@@ -176,7 +175,9 @@ class TestCooldownMixin(common.HeatTestCase):
previous_meta = {'cooldown': {
now.isoformat(): 'change_in_capacity : 1'}}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertFalse(pol._is_scaling_allowed())
ex = self.assertRaises(resource.NoActionRequired,
pol._check_scaling_allowed)
self.assertIn('due to cooldown', six.text_type(ex))
def test_cooldown_is_in_progress_scaling_unfinished(self):
t = template_format.parse(as_template)
@@ -185,7 +186,9 @@ class TestCooldownMixin(common.HeatTestCase):
previous_meta = {'scaling_in_progress': True}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertFalse(pol._is_scaling_allowed())
ex = self.assertRaises(resource.NoActionRequired,
pol._check_scaling_allowed)
self.assertIn('due to scaling activity', six.text_type(ex))
def test_cooldown_not_in_progress(self):
t = template_format.parse(as_template)
@@ -200,7 +203,7 @@ class TestCooldownMixin(common.HeatTestCase):
'scaling_in_progress': False
}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertTrue(pol._is_scaling_allowed())
self.assertIsNone(pol._check_scaling_allowed())
def test_scaling_policy_cooldown_zero(self):
t = template_format.parse(as_template)
@@ -216,7 +219,7 @@ class TestCooldownMixin(common.HeatTestCase):
previous_meta = {'cooldown': {
now.isoformat(): 'change_in_capacity : 1'}}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertTrue(pol._is_scaling_allowed())
self.assertIsNone(pol._check_scaling_allowed())
def test_scaling_policy_cooldown_none(self):
t = template_format.parse(as_template)
@@ -233,7 +236,7 @@ class TestCooldownMixin(common.HeatTestCase):
previous_meta = {'cooldown': {
now.isoformat(): 'change_in_capacity : 1'}}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertTrue(pol._is_scaling_allowed())
self.assertIsNone(pol._check_scaling_allowed())
def test_no_cooldown_no_scaling_in_progress(self):
t = template_format.parse(as_template)
@@ -243,7 +246,7 @@ class TestCooldownMixin(common.HeatTestCase):
# no cooldown entry in the metadata
previous_meta = {'scaling_in_progress': False}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertTrue(pol._is_scaling_allowed())
self.assertIsNone(pol._check_scaling_allowed())
def test_metadata_is_written(self):
t = template_format.parse(as_template)


@@ -294,10 +294,9 @@ class TestGroupAdjust(common.HeatTestCase):
self.assertIsNone(self.group.validate())
def test_scaling_policy_cooldown_toosoon(self):
"""If _is_scaling_allowed() returns False don't progress."""
dont_call = self.patchobject(self.group, 'resize')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=False)
self.patchobject(self.group, '_check_scaling_allowed',
side_effect=resource.NoActionRequired)
self.assertRaises(resource.NoActionRequired,
self.group.adjust, 1)
self.assertEqual([], dont_call.call_args_list)
@@ -335,8 +334,7 @@ class TestGroupAdjust(common.HeatTestCase):
resize = self.patchobject(self.group, 'resize')
finished_scaling = self.patchobject(self.group, '_finished_scaling')
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.group.adjust(33, adjustment_type='PercentChangeInCapacity',
min_adjustment_step=2)
@@ -367,8 +365,7 @@ class TestGroupAdjust(common.HeatTestCase):
resize = self.patchobject(self.group, 'resize')
finished_scaling = self.patchobject(self.group, '_finished_scaling')
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.group.adjust(-33, adjustment_type='PercentChangeInCapacity',
min_adjustment_step=2)
@@ -399,8 +396,7 @@ class TestGroupAdjust(common.HeatTestCase):
resize = self.patchobject(self.group, 'resize')
finished_scaling = self.patchobject(self.group, '_finished_scaling')
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.group.adjust(1)
expected_notifies = [
@@ -429,8 +425,7 @@ class TestGroupAdjust(common.HeatTestCase):
self.patchobject(self.group, 'resize',
side_effect=ValueError('test error'))
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.patchobject(self.group, '_finished_scaling')
self.assertRaises(ValueError, self.group.adjust, 1)
@@ -459,8 +454,7 @@ class TestGroupAdjust(common.HeatTestCase):
self.patchobject(self.group, 'resize',
side_effect=ValueError('test error'))
notify = self.patch('heat.engine.notification.autoscaling.send')
self.patchobject(self.group, '_is_scaling_allowed',
return_value=True)
self.patchobject(self.group, '_check_scaling_allowed')
self.patchobject(self.group, '_finished_scaling')
self.assertRaises(ValueError, self.group.adjust,


@@ -87,8 +87,8 @@ class TestAutoScalingPolicy(common.HeatTestCase):
self.patchobject(group, 'adjust',
side_effect=resource.NoActionRequired())
mock_fin_scaling = self.patchobject(up_policy, '_finished_scaling')
with mock.patch.object(up_policy, '_is_scaling_allowed',
return_value=True) as mock_isa:
with mock.patch.object(up_policy,
'_check_scaling_allowed') as mock_isa:
self.assertRaises(resource.NoActionRequired,
up_policy.handle_signal)
mock_isa.assert_called_once_with()
@@ -103,15 +103,14 @@ class TestAutoScalingPolicy(common.HeatTestCase):
group = stack['WebServerGroup']
self.patchobject(group, 'adjust')
mock_fin_scaling = self.patchobject(up_policy, '_finished_scaling')
with mock.patch.object(up_policy, '_is_scaling_allowed',
return_value=True) as mock_isa:
with mock.patch.object(up_policy,
'_check_scaling_allowed') as mock_isa:
self.assertIsNone(up_policy.handle_signal())
mock_isa.assert_called_once_with()
mock_fin_scaling.assert_called_once_with('ChangeInCapacity : 1',
size_changed=True)
def test_scaling_policy_cooldown_toosoon(self):
"""If _is_scaling_allowed() returns False don't progress."""
t = template_format.parse(as_template)
stack = utils.parse_stack(t, params=as_params)
pol = self.create_scaling_policy(t, stack, 'WebServerScaleUpPolicy')
@@ -120,8 +119,9 @@ class TestAutoScalingPolicy(common.HeatTestCase):
with mock.patch.object(group, 'adjust',
side_effect=AssertionError) as dont_call:
with mock.patch.object(pol, '_is_scaling_allowed',
return_value=False) as mock_isa:
with mock.patch.object(
pol, '_check_scaling_allowed',
side_effect=resource.NoActionRequired) as mock_isa:
self.assertRaises(resource.NoActionRequired,
pol.handle_signal, details=test)
mock_isa.assert_called_once_with()
@@ -135,8 +135,7 @@ class TestAutoScalingPolicy(common.HeatTestCase):
group = self.patchobject(pol.stack, 'resource_by_refid').return_value
group.name = 'fluffy'
with mock.patch.object(pol, '_is_scaling_allowed',
return_value=True) as mock_isa:
with mock.patch.object(pol, '_check_scaling_allowed') as mock_isa:
pol.handle_signal(details=test)
mock_isa.assert_called_once_with()
group.adjust.assert_called_once_with(1, 'ChangeInCapacity', None)
@@ -187,7 +186,9 @@ class TestCooldownMixin(common.HeatTestCase):
previous_meta = {'cooldown': {
now.isoformat(): 'ChangeInCapacity : 1'}}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertFalse(pol._is_scaling_allowed())
ex = self.assertRaises(resource.NoActionRequired,
pol._check_scaling_allowed)
self.assertIn('due to cooldown', six.text_type(ex))
def test_cooldown_is_in_progress_scaling_unfinished(self):
t = template_format.parse(as_template)
@@ -196,7 +197,9 @@ class TestCooldownMixin(common.HeatTestCase):
previous_meta = {'scaling_in_progress': True}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertFalse(pol._is_scaling_allowed())
ex = self.assertRaises(resource.NoActionRequired,
pol._check_scaling_allowed)
self.assertIn('due to scaling activity', six.text_type(ex))
def test_cooldown_not_in_progress(self):
t = template_format.parse(as_template)
@@ -211,7 +214,7 @@ class TestCooldownMixin(common.HeatTestCase):
'scaling_in_progress': False
}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertTrue(pol._is_scaling_allowed())
self.assertIsNone(pol._check_scaling_allowed())
def test_scaling_policy_cooldown_zero(self):
t = template_format.parse(as_template)
@@ -226,7 +229,7 @@ class TestCooldownMixin(common.HeatTestCase):
now = timeutils.utcnow()
previous_meta = {now.isoformat(): 'ChangeInCapacity : 1'}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertTrue(pol._is_scaling_allowed())
self.assertIsNone(pol._check_scaling_allowed())
def test_scaling_policy_cooldown_none(self):
t = template_format.parse(as_template)
@@ -242,7 +245,7 @@ class TestCooldownMixin(common.HeatTestCase):
now = timeutils.utcnow()
previous_meta = {now.isoformat(): 'ChangeInCapacity : 1'}
self.patchobject(pol, 'metadata_get', return_value=previous_meta)
self.assertTrue(pol._is_scaling_allowed())
self.assertIsNone(pol._check_scaling_allowed())
def test_metadata_is_written(self):
t = template_format.parse(as_template)