Make sure not to try to reserve an already-reserved node
After the switch to openstacksdk we no longer had a sufficient check on a node's availability or maintenance state. This patch restores it. Change-Id: I2c85cf0adb02061b3dd85f19dd10c8a5af1118da
This commit is contained in:
parent
6c55e2f3e2
commit
e795f6c841
@ -98,25 +98,29 @@ class Provisioner(_utils.GetNodeMixin):
|
||||
|
||||
if candidates:
|
||||
nodes = [self._get_node(node) for node in candidates]
|
||||
filters = [
|
||||
_scheduler.NodeTypeFilter(resource_class, conductor_group),
|
||||
]
|
||||
else:
|
||||
kwargs = {}
|
||||
if conductor_group:
|
||||
kwargs['conductor_group'] = conductor_group
|
||||
nodes = list(self.connection.baremetal.nodes(
|
||||
associated=False,
|
||||
provision_state='available',
|
||||
maintenance=False,
|
||||
resource_class=resource_class,
|
||||
conductor_group=conductor_group,
|
||||
details=True))
|
||||
details=True,
|
||||
**kwargs))
|
||||
if not nodes:
|
||||
raise exceptions.NodesNotFound(resource_class, conductor_group)
|
||||
# Ensure parallel executions don't try nodes in the same sequence
|
||||
random.shuffle(nodes)
|
||||
# No need to filter by resource_class and conductor_group any more
|
||||
filters = []
|
||||
|
||||
LOG.debug('Candidate nodes: %s', nodes)
|
||||
|
||||
filters.append(_scheduler.CapabilitiesFilter(capabilities))
|
||||
filters.append(_scheduler.TraitsFilter(traits))
|
||||
filters = [
|
||||
_scheduler.NodeTypeFilter(resource_class, conductor_group),
|
||||
_scheduler.CapabilitiesFilter(capabilities),
|
||||
_scheduler.TraitsFilter(traits),
|
||||
]
|
||||
if predicate is not None:
|
||||
filters.append(_scheduler.CustomPredicateFilter(predicate))
|
||||
|
||||
|
@ -123,12 +123,33 @@ class NodeTypeFilter(Filter):
|
||||
self.conductor_group = conductor_group
|
||||
|
||||
def __call__(self, node):
|
||||
return (
|
||||
(self.resource_class is None or
|
||||
node.resource_class == self.resource_class) and
|
||||
(self.conductor_group is None or
|
||||
node.conductor_group == self.conductor_group)
|
||||
)
|
||||
if node.instance_id:
|
||||
LOG.debug('Node %s is already reserved', _utils.log_res(node))
|
||||
return False
|
||||
|
||||
if node.is_maintenance:
|
||||
LOG.debug('Node %s is in maintenance', _utils.log_res(node))
|
||||
return False
|
||||
|
||||
if (self.resource_class is not None
|
||||
and node.resource_class != self.resource_class):
|
||||
LOG.debug('Resource class %(real)s does not match the expected '
|
||||
'value of %(exp)s for node %(node)s',
|
||||
{'node': _utils.log_res(node),
|
||||
'exp': self.resource_class,
|
||||
'real': node.resource_class})
|
||||
return False
|
||||
|
||||
if (self.conductor_group is not None
|
||||
and node.conductor_group != self.conductor_group):
|
||||
LOG.debug('Conductor group %(real)s does not match the expected '
|
||||
'value of %(exp)s for node %(node)s',
|
||||
{'node': _utils.log_res(node),
|
||||
'exp': self.conductor_group,
|
||||
'real': node.conductor_group})
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def fail(self):
|
||||
raise exceptions.NodesNotFound(self.resource_class,
|
||||
|
@ -29,7 +29,7 @@ from metalsmith import sources
|
||||
|
||||
NODE_FIELDS = ['name', 'id', 'instance_info', 'instance_id', 'is_maintenance',
|
||||
'maintenance_reason', 'properties', 'provision_state', 'extra',
|
||||
'last_error', 'traits']
|
||||
'last_error', 'traits', 'resource_class', 'conductor_group']
|
||||
|
||||
|
||||
class TestInit(testtools.TestCase):
|
||||
@ -98,6 +98,8 @@ class TestReserveNode(Base):
|
||||
kwargs.setdefault('id', '000')
|
||||
kwargs.setdefault('properties', {'local_gb': 100})
|
||||
kwargs.setdefault('instance_info', {})
|
||||
kwargs.setdefault('instance_id', None)
|
||||
kwargs.setdefault('is_maintenance', False)
|
||||
return mock.Mock(spec=NODE_FIELDS, **kwargs)
|
||||
|
||||
def test_no_nodes(self):
|
||||
@ -108,7 +110,7 @@ class TestReserveNode(Base):
|
||||
self.assertFalse(self.api.baremetal.update_node.called)
|
||||
|
||||
def test_simple_ok(self):
|
||||
nodes = [self._node()]
|
||||
nodes = [self._node(resource_class='control')]
|
||||
self.api.baremetal.nodes.return_value = nodes
|
||||
|
||||
node = self.pr.reserve_node('control')
|
||||
@ -129,7 +131,8 @@ class TestReserveNode(Base):
|
||||
|
||||
def test_with_capabilities(self):
|
||||
nodes = [
|
||||
self._node(properties={'local_gb': 100, 'capabilities': caps})
|
||||
self._node(properties={'local_gb': 100, 'capabilities': caps},
|
||||
resource_class='control')
|
||||
for caps in ['answer:1', 'answer:42', None]
|
||||
]
|
||||
expected = nodes[1]
|
||||
@ -235,8 +238,14 @@ class TestReserveNode(Base):
|
||||
instance_info={'capabilities': {'cat': 'meow'}})
|
||||
|
||||
def test_provided_nodes_no_match(self):
|
||||
nodes = [self._node(resource_class='compute', conductor_group='loc1'),
|
||||
self._node(resource_class='control', conductor_group='loc2')]
|
||||
nodes = [
|
||||
self._node(resource_class='compute', conductor_group='loc1'),
|
||||
self._node(resource_class='control', conductor_group='loc2'),
|
||||
self._node(resource_class='control', conductor_group='loc1',
|
||||
is_maintenance=True),
|
||||
self._node(resource_class='control', conductor_group='loc1',
|
||||
instance_id='abcd')
|
||||
]
|
||||
|
||||
self.assertRaises(exceptions.NodesNotFound,
|
||||
self.pr.reserve_node, candidates=nodes,
|
||||
|
5
releasenotes/notes/associated-993c26ac5dc0cfc0.yaml
Normal file
5
releasenotes/notes/associated-993c26ac5dc0cfc0.yaml
Normal file
@ -0,0 +1,5 @@
|
||||
---
|
||||
critical:
|
||||
- |
|
||||
Fixes a regression that caused deployed nodes to be picked for deployment
|
||||
again.
|
Loading…
Reference in New Issue
Block a user