Extend node_cache.pop_node() result to be a structure
In the next patch we will need `started_at` to calculate timeout for waiting for power off state. We will probably need other fields that might be stored in the cache. Change-Id: I9d7107322530c5ac6e407a374acb52c4effe4563 Implements: blueprint returning-to-ramdisk
This commit is contained in:
parent
472537d213
commit
a29c3999fa
|
@ -64,21 +64,23 @@ def process(node_info):
|
|||
'ipmi_address': bmc_address})
|
||||
LOG.info('Eligible interfaces are %s', valid_interfaces)
|
||||
|
||||
uuid = node_cache.pop_node(bmc_address=bmc_address, mac=valid_macs)
|
||||
cached_node = node_cache.pop_node(bmc_address=bmc_address, mac=valid_macs)
|
||||
ironic = utils.get_client()
|
||||
try:
|
||||
node = ironic.node.get(uuid)
|
||||
node = ironic.node.get(cached_node.uuid)
|
||||
except exceptions.NotFound as exc:
|
||||
LOG.error('Node UUID %(uuid)s is in the cache, but not found '
|
||||
'in Ironic: %(exc)s',
|
||||
{'uuid': uuid, 'exc': exc})
|
||||
{'uuid': cached_node.uuid, 'exc': exc})
|
||||
raise utils.DiscoveryFailed('Node UUID %s was found is cache, '
|
||||
'but is not found in Ironic' % uuid,
|
||||
'but is not found in Ironic' %
|
||||
cached_node.uuid,
|
||||
code=404)
|
||||
|
||||
if not node.extra.get('on_discovery'):
|
||||
LOG.error('Node is not on discovery, cannot proceed')
|
||||
raise utils.DiscoveryFailed('Node %s is not on discovery' % uuid,
|
||||
raise utils.DiscoveryFailed('Node %s is not on discovery' %
|
||||
cached_node.uuid,
|
||||
code=403)
|
||||
|
||||
updated = _process_node(ironic, node, node_info, valid_macs)
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
"""Cache for nodes currently under discovery."""
|
||||
|
||||
import atexit
|
||||
import collections
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
|
@ -37,6 +38,10 @@ create table if not exists attributes
|
|||
"""
|
||||
|
||||
|
||||
NodeInfo = collections.namedtuple('NodeInfo', ('uuid', 'started_at'))
|
||||
"""Record about a node in the cache."""
|
||||
|
||||
|
||||
def init():
|
||||
"""Initialize the database."""
|
||||
global _DB_NAME
|
||||
|
@ -109,7 +114,7 @@ def pop_node(**attributes):
|
|||
This function also deletes a node from the cache, hence its name.
|
||||
|
||||
:param attributes: attributes known about this node (like macs, BMC etc)
|
||||
:returns: UUID
|
||||
:returns: structure NodeInfo with attributes ``uuid`` and ``started_at``
|
||||
:raises: DiscoveryFailed if node is not found
|
||||
"""
|
||||
# NOTE(dtantsur): sorting is not required, but gives us predictability
|
||||
|
@ -141,8 +146,17 @@ def pop_node(**attributes):
|
|||
raise utils.DiscoveryFailed('Multiple matching nodes found', code=404)
|
||||
|
||||
uuid = found.pop()
|
||||
drop_node(uuid)
|
||||
return uuid
|
||||
try:
|
||||
row = (db.execute('select started_at from nodes where uuid=?', (uuid,))
|
||||
.fetchone())
|
||||
if not row:
|
||||
LOG.error('Inconsistent database: %s is in attributes table, '
|
||||
'but not in nodes table', uuid)
|
||||
raise utils.DiscoveryFailed('Could not find a node', code=404)
|
||||
|
||||
return NodeInfo(uuid=uuid, started_at=row[0])
|
||||
finally:
|
||||
drop_node(uuid)
|
||||
|
||||
|
||||
def clean_up():
|
||||
|
|
|
@ -96,7 +96,8 @@ class TestProcess(BaseTest):
|
|||
raise exceptions.Conflict()
|
||||
|
||||
cli.port.create.side_effect = fake_port_create
|
||||
pop_mock.return_value = self.node.uuid
|
||||
pop_mock.return_value = node_cache.NodeInfo(uuid=self.node.uuid,
|
||||
started_at=time.time())
|
||||
cli.node.get.return_value = self.node
|
||||
post_mock.return_value = (['fake patch', 'fake patch 2'],
|
||||
{'11:22:33:44:55:66': ['port patch']})
|
||||
|
@ -184,7 +185,8 @@ class TestProcess(BaseTest):
|
|||
def test_not_found_in_ironic(self, client_mock, pop_mock, filters_mock,
|
||||
pre_mock, post_mock):
|
||||
cli = client_mock.return_value
|
||||
pop_mock.return_value = self.node.uuid
|
||||
pop_mock.return_value = node_cache.NodeInfo(uuid=self.node.uuid,
|
||||
started_at=time.time())
|
||||
cli.node.get.side_effect = exceptions.NotFound()
|
||||
|
||||
self.assertRaisesRegexp(utils.DiscoveryFailed,
|
||||
|
@ -595,13 +597,15 @@ class TestNodeCachePop(BaseTest):
|
|||
|
||||
def test_bmc(self):
|
||||
res = node_cache.pop_node(bmc_address='1.2.3.4')
|
||||
self.assertEqual(self.uuid, res)
|
||||
self.assertEqual(self.uuid, res.uuid)
|
||||
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1)
|
||||
self.assertEqual([], self.db.execute(
|
||||
"select * from attributes").fetchall())
|
||||
|
||||
def test_macs(self):
|
||||
res = node_cache.pop_node(mac=['11:22:33:33:33:33', self.macs[1]])
|
||||
self.assertEqual(self.uuid, res)
|
||||
self.assertEqual(self.uuid, res.uuid)
|
||||
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1)
|
||||
self.assertEqual([], self.db.execute(
|
||||
"select * from attributes").fetchall())
|
||||
|
||||
|
@ -618,10 +622,17 @@ class TestNodeCachePop(BaseTest):
|
|||
def test_both(self):
|
||||
res = node_cache.pop_node(bmc_address='1.2.3.4',
|
||||
mac=self.macs)
|
||||
self.assertEqual(self.uuid, res)
|
||||
self.assertEqual(self.uuid, res.uuid)
|
||||
self.assertTrue(time.time() - 60 < res.started_at < time.time() + 1)
|
||||
self.assertEqual([], self.db.execute(
|
||||
"select * from attributes").fetchall())
|
||||
|
||||
def test_inconsistency(self):
|
||||
with self.db:
|
||||
self.db.execute('delete from nodes where uuid=?', (self.uuid,))
|
||||
self.assertRaises(utils.DiscoveryFailed, node_cache.pop_node,
|
||||
bmc_address='1.2.3.4')
|
||||
|
||||
|
||||
class TestPlugins(unittest.TestCase):
|
||||
@patch.object(example_plugin.ExampleProcessingHook, 'pre_discover',
|
||||
|
|
Loading…
Reference in New Issue