Browse Source

Merge "Allow scale-down of ASG as part of update" into stable/queens

stable/queens
Zuul 1 year ago
committed by Gerrit Code Review
parent
commit
14da305bf1
  1. 4
      heat/engine/resources/aws/autoscaling/autoscaling_group.py
  2. 19
      heat/engine/resources/openstack/heat/instance_group.py
  3. 69
      heat/tests/openstack/heat/test_instance_group.py
  4. 1
      heat/tests/openstack/heat/test_instance_group_update_policy.py

4
heat/engine/resources/aws/autoscaling/autoscaling_group.py

@@ -186,6 +186,10 @@ class AutoScalingGroup(cooldown.CooldownMixin, instgrp.InstanceGroup):
schema=rolling_update_schema)
}
def get_size(self):
"""Get desired capacity."""
return self.properties[self.DESIRED_CAPACITY]
def handle_create(self):
return self.create_with_template(self.child_template())

19
heat/engine/resources/openstack/heat/instance_group.py

@@ -133,6 +133,10 @@ class InstanceGroup(stack_resource.StackResource):
schema=rolling_update_schema)
}
def get_size(self):
"""Get desired size."""
return self.properties[self.SIZE]
def validate(self):
"""Add validation for update_policy."""
self.validate_launchconfig()
@@ -323,7 +327,11 @@ class InstanceGroup(stack_resource.StackResource):
return
capacity = len(self.nested()) if self.nested() else 0
batches = list(self._get_batches(capacity, batch_size, min_in_service))
target_capacity = min(self.get_size() or capacity, capacity)
batches = list(self._get_batches(target_capacity,
capacity,
batch_size,
min_in_service))
update_timeout = self._update_timeout(len(batches), pause_sec)
@@ -343,7 +351,7 @@ class InstanceGroup(stack_resource.StackResource):
self._lb_reload()
@staticmethod
def _get_batches(capacity, batch_size, min_in_service):
def _get_batches(target_capacity, capacity, batch_size, min_in_service):
"""Return an iterator over the batches in a batched update.
Each batch is a tuple comprising the total size of the group after
@@ -352,15 +360,14 @@ class InstanceGroup(stack_resource.StackResource):
updating an existing one).
"""
efft_capacity = capacity
updated = 0
while rolling_update.needs_update(capacity, efft_capacity, updated):
batch = rolling_update.next_batch(capacity, efft_capacity,
while rolling_update.needs_update(target_capacity, capacity, updated):
batch = rolling_update.next_batch(target_capacity, capacity,
updated, batch_size,
min_in_service)
yield batch
efft_capacity, num_updates = batch
capacity, num_updates = batch
updated += num_updates
def _check_for_completion(self, updater):

69
heat/tests/openstack/heat/test_instance_group.py

@@ -431,42 +431,45 @@ class ResizeWithFailedInstancesTest(InstanceGroupWithNestedStack):
class TestGetBatches(common.HeatTestCase):
scenarios = [
('4_1_0', dict(curr_cap=4, bat_size=1, min_serv=0,
batches=[(4, 1)] * 4)),
('4_1_4', dict(curr_cap=4, bat_size=1, min_serv=4,
batches=([(5, 1)] * 4) + [(4, 0)])),
('4_1_5', dict(curr_cap=4, bat_size=1, min_serv=5,
batches=([(5, 1)] * 4) + [(4, 0)])),
('4_2_0', dict(curr_cap=4, bat_size=2, min_serv=0,
batches=[(4, 2)] * 2)),
('4_2_4', dict(curr_cap=4, bat_size=2, min_serv=4,
batches=([(6, 2)] * 2) + [(4, 0)])),
('5_2_0', dict(curr_cap=5, bat_size=2, min_serv=0,
batches=([(5, 2)] * 2) + [(5, 1)])),
('5_2_4', dict(curr_cap=5, bat_size=2, min_serv=4,
batches=([(6, 2)] * 2) + [(5, 1)])),
('3_2_0', dict(curr_cap=3, bat_size=2, min_serv=0,
batches=[(3, 2), (3, 1)])),
('3_2_4', dict(curr_cap=3, bat_size=2, min_serv=4,
batches=[(5, 2), (4, 1), (3, 0)])),
('4_4_0', dict(curr_cap=4, bat_size=4, min_serv=0,
batches=[(4, 4)])),
('4_5_0', dict(curr_cap=4, bat_size=5, min_serv=0,
batches=[(4, 4)])),
('4_4_1', dict(curr_cap=4, bat_size=4, min_serv=1,
batches=[(5, 4), (4, 0)])),
('4_6_1', dict(curr_cap=4, bat_size=6, min_serv=1,
batches=[(5, 4), (4, 0)])),
('4_4_2', dict(curr_cap=4, bat_size=4, min_serv=2,
batches=[(6, 4), (4, 0)])),
('4_4_4', dict(curr_cap=4, bat_size=4, min_serv=4,
batches=[(8, 4), (4, 0)])),
('4_5_6', dict(curr_cap=4, bat_size=5, min_serv=6,
batches=[(8, 4), (4, 0)])),
('4_4_1_0', dict(tgt_cap=4, curr_cap=4, bat_size=1, min_serv=0,
batches=[(4, 1)] * 4)),
('3_4_1_0', dict(tgt_cap=3, curr_cap=4, bat_size=1, min_serv=0,
batches=[(3, 1)] * 3)),
('4_4_1_4', dict(tgt_cap=4, curr_cap=4, bat_size=1, min_serv=4,
batches=([(5, 1)] * 4) + [(4, 0)])),
('4_4_1_5', dict(tgt_cap=4, curr_cap=4, bat_size=1, min_serv=5,
batches=([(5, 1)] * 4) + [(4, 0)])),
('4_4_2_0', dict(tgt_cap=4, curr_cap=4, bat_size=2, min_serv=0,
batches=[(4, 2)] * 2)),
('4_4_2_4', dict(tgt_cap=4, curr_cap=4, bat_size=2, min_serv=4,
batches=([(6, 2)] * 2) + [(4, 0)])),
('5_5_2_0', dict(tgt_cap=5, curr_cap=5, bat_size=2, min_serv=0,
batches=([(5, 2)] * 2) + [(5, 1)])),
('5_5_2_4', dict(tgt_cap=5, curr_cap=5, bat_size=2, min_serv=4,
batches=([(6, 2)] * 2) + [(5, 1)])),
('3_3_2_0', dict(tgt_cap=3, curr_cap=3, bat_size=2, min_serv=0,
batches=[(3, 2), (3, 1)])),
('3_3_2_4', dict(tgt_cap=3, curr_cap=3, bat_size=2, min_serv=4,
batches=[(5, 2), (4, 1), (3, 0)])),
('4_4_4_0', dict(tgt_cap=4, curr_cap=4, bat_size=4, min_serv=0,
batches=[(4, 4)])),
('4_4_5_0', dict(tgt_cap=4, curr_cap=4, bat_size=5, min_serv=0,
batches=[(4, 4)])),
('4_4_4_1', dict(tgt_cap=4, curr_cap=4, bat_size=4, min_serv=1,
batches=[(5, 4), (4, 0)])),
('4_4_6_1', dict(tgt_cap=4, curr_cap=4, bat_size=6, min_serv=1,
batches=[(5, 4), (4, 0)])),
('4_4_4_2', dict(tgt_cap=4, curr_cap=4, bat_size=4, min_serv=2,
batches=[(6, 4), (4, 0)])),
('4_4_4_4', dict(tgt_cap=4, curr_cap=4, bat_size=4, min_serv=4,
batches=[(8, 4), (4, 0)])),
('4_4_5_6', dict(tgt_cap=4, curr_cap=4, bat_size=5, min_serv=6,
batches=[(8, 4), (4, 0)])),
]
def test_get_batches(self):
batches = list(instgrp.InstanceGroup._get_batches(self.curr_cap,
batches = list(instgrp.InstanceGroup._get_batches(self.tgt_cap,
self.curr_cap,
self.bat_size,
self.min_serv))
self.assertEqual(self.batches, batches)

1
heat/tests/openstack/heat/test_instance_group_update_policy.py

@@ -298,5 +298,6 @@ class InstanceGroupReplaceTest(common.HeatTestCase):
group = instgrp.InstanceGroup('asg', defn, stack)
group.nested = mock.MagicMock(return_value=range(12))
group.get_size = mock.Mock(return_value=12)
self.assertRaises(ValueError,
group._replace, 10, 1, 14 * 60)
Loading…
Cancel
Save