From 1d7b86f1f0541af11b8df296dd4bb37dc5e25515 Mon Sep 17 00:00:00 2001
From: Dmitry Tantsur
Date: Wed, 26 Nov 2014 14:51:03 +0100
Subject: [PATCH] Make /v1/continue synchronous and return real errors

Change-Id: Idfe84eaa9bf24937d24126926b569c6004d847e9
Implements: blueprint returning-to-ramdisk
---
 README.rst                     |  8 ++++-
 ironic_discoverd/discoverd.py  | 22 ++++++------
 ironic_discoverd/main.py       |  9 +++--
 ironic_discoverd/node_cache.py |  7 ++--
 ironic_discoverd/test.py       | 65 ++++++++++++++++++++++++++--------
 5 files changed, 80 insertions(+), 31 deletions(-)

diff --git a/README.rst b/README.rst
index 7daa9acc3..054dd19b9 100644
--- a/README.rst
+++ b/README.rst
@@ -182,7 +182,12 @@ HTTP API consist of 2 endpoints:
     * ``mac`` MAC address
     * ``ip`` IP address
 
-  Response: always HTTP 202.
+  Response:
+
+  * 200 - OK
+  * 400 - bad request
+  * 403 - node is not on discovery
+  * 404 - node cannot be found or multiple nodes found
 
 .. _bug #1391866: https://bugs.launchpad.net/ironic-discoverd/+bug/1391866
 
@@ -192,6 +197,7 @@ Change Log
 v1.0.0
 ~~~~~~
 
+* ``/v1/continue`` is now synchronous and returns real errors to the caller.
 * Discovery now times out by default.
 * Add support for plugins that hook into data processing pipeline, see
   `plugin-architecture blueprint`_ for details.
diff --git a/ironic_discoverd/discoverd.py b/ironic_discoverd/discoverd.py
index ff8c85340..f643ac619 100644
--- a/ironic_discoverd/discoverd.py
+++ b/ironic_discoverd/discoverd.py
@@ -36,7 +36,7 @@ def process(node_info):
     if node_info.get('error'):
         LOG.error('Error happened during discovery: %s',
                   node_info['error'])
-        return
+        raise utils.DiscoveryFailed(node_info['error'])
 
     compat = conf.getboolean('discoverd', 'ports_for_inactive_interfaces')
     if 'interfaces' not in node_info and 'macs' in node_info:
@@ -51,7 +51,8 @@
     if missing:
         LOG.error('The following required parameters are missing: %s',
                   missing)
-        return
+        raise utils.DiscoveryFailed(
+            'The following required parameters are missing: %s' % missing)
 
     LOG.info('Discovery data received from node with BMC '
              '%(ipmi_address)s: CPUs: %(cpus)s %(cpu_arch)s, '
@@ -78,23 +79,21 @@
     uuid = node_cache.pop_node(bmc_address=node_info['ipmi_address'],
                                mac=valid_macs)
 
-    if uuid is None:
-        LOG.debug('Unable to find a node, cannot proceed')
-        return
-
     ironic = utils.get_client()
     try:
         node = ironic.node.get(uuid)
     except exceptions.NotFound as exc:
         LOG.error('Node UUID %(uuid)s is in the cache, but not found '
-                  'by Ironic: %(exc)s',
-                  {'uuid': uuid,
-                   'exc': exc})
-        return
+                  'in Ironic: %(exc)s',
+                  {'uuid': uuid, 'exc': exc})
+        raise utils.DiscoveryFailed('Node UUID %s was found in cache, '
+                                    'but is not found in Ironic' % uuid,
+                                    code=404)
 
     if not node.extra.get('on_discovery'):
         LOG.error('Node is not on discovery, cannot proceed')
-        return
+        raise utils.DiscoveryFailed('Node %s is not on discovery' % uuid,
+                                    code=403)
 
     _process_node(ironic, node, node_info, valid_macs)
 
@@ -150,6 +149,7 @@ def _process_node(ironic, node, node_info, valid_macs):
     except Exception as exc:
         LOG.error('Failed to power off node %s, check it\'s power '
                   'management configuration:\n%s', node.uuid, exc)
+        raise utils.DiscoveryFailed('Failed to power off node %s' % node.uuid)
 
 
 def discover(uuids):
diff --git a/ironic_discoverd/main.py b/ironic_discoverd/main.py
index 8e4959624..8d8b51eac 100644
--- a/ironic_discoverd/main.py
+++ b/ironic_discoverd/main.py
@@ -14,6 +14,7 @@
 import eventlet
 eventlet.monkey_patch(thread=False)
 
+import json
 import logging
 import sys
 
@@ -36,8 +37,12 @@ LOG = discoverd.LOG
 def post_continue():
     data = request.get_json(force=True)
     LOG.debug("Got JSON %s, going into processing thread", data)
-    eventlet.greenthread.spawn_n(discoverd.process, data)
-    return "{}", 202, {"content-type": "application/json"}
+    try:
+        res = discoverd.process(data)
+    except utils.DiscoveryFailed as exc:
+        return str(exc), exc.http_code
+    else:
+        return json.dumps(res), 200, {'Content-Type': 'application/json'}
 
 
 @app.route('/v1/discover', methods=['POST'])
diff --git a/ironic_discoverd/node_cache.py b/ironic_discoverd/node_cache.py
index 283dd2a54..b1969aff6 100644
--- a/ironic_discoverd/node_cache.py
+++ b/ironic_discoverd/node_cache.py
@@ -109,7 +109,8 @@ def pop_node(**attributes):
     This function also deletes a node from the cache, thus it's name.
 
     :param attributes: attributes known about this node (like macs, BMC etc)
-    :returns: UUID or None
+    :returns: UUID
+    :raises: DiscoveryFailed if node is not found
     """
     # NOTE(dtantsur): sorting is not required, but gives us predictability
     found = set()
@@ -131,13 +132,13 @@
     if not found:
         LOG.error('Could not find a node based on attributes %s',
                   list(attributes))
-        return
+        raise utils.DiscoveryFailed('Could not find a node', code=404)
     elif len(found) > 1:
         LOG.error('Multiple nodes were matched based on attributes %(keys)s: '
                   '%(uuids)s',
                   {'keys': list(attributes),
                    'uuids': list(found)})
-        return
+        raise utils.DiscoveryFailed('Multiple matching nodes found', code=404)
 
     uuid = found.pop()
     drop_node(uuid)
diff --git a/ironic_discoverd/test.py b/ironic_discoverd/test.py
index 0f14fcdf3..8b4aea31f 100644
--- a/ironic_discoverd/test.py
+++ b/ironic_discoverd/test.py
@@ -137,12 +137,23 @@ class TestProcess(BaseTest):
                       'true')
         self._do_test(client_mock, pop_mock, filters_mock, pre_mock, post_mock)
 
+    def test_not_on_discovery(self, client_mock, pop_mock, filters_mock,
+                              pre_mock, post_mock):
+        del self.node.extra['on_discovery']
+        self.assertRaisesRegexp(utils.DiscoveryFailed,
+                                'not on discovery',
+                                self._do_test,
+                                client_mock, pop_mock, filters_mock, pre_mock,
+                                post_mock)
+
     def test_not_found(self, client_mock, pop_mock, filters_mock, pre_mock,
                        post_mock):
         cli = client_mock.return_value
-        pop_mock.return_value = None
+        pop_mock.side_effect = utils.DiscoveryFailed('boom')
 
-        discoverd.process(self.data)
+        self.assertRaisesRegexp(utils.DiscoveryFailed,
+                                'boom',
+                                discoverd.process, self.data)
 
         self.assertFalse(cli.node.update.called)
         self.assertFalse(cli.port.create.called)
@@ -154,13 +165,29 @@
         pop_mock.return_value = self.node.uuid
         cli.node.get.side_effect = exceptions.NotFound()
 
-        discoverd.process(self.data)
+        self.assertRaisesRegexp(utils.DiscoveryFailed,
+                                'not found in Ironic',
+                                discoverd.process, self.data)
 
         cli.node.get.assert_called_once_with(self.node.uuid)
         self.assertFalse(cli.node.update.called)
         self.assertFalse(cli.port.create.called)
         self.assertFalse(cli.node.set_power_state.called)
 
+    def test_error(self, client_mock, pop_mock, filters_mock, pre_mock,
+                   post_mock):
+        self.data['error'] = 'BOOM'
+        self.assertRaisesRegexp(utils.DiscoveryFailed,
+                                'BOOM',
+                                discoverd.process, self.data)
+
+    def test_missing(self, client_mock, pop_mock, filters_mock, pre_mock,
+                     post_mock):
+        del self.data['cpus']
+        self.assertRaisesRegexp(utils.DiscoveryFailed,
+                                'missing',
+                                discoverd.process, self.data)
+
 
 @patch.object(eventlet.greenthread, 'spawn_n',
               side_effect=lambda f, *a: f(*a) and None)
@@ -383,11 +410,21 @@ class TestApi(BaseTest):
         self.assertFalse(discover_mock.called)
         keystone_mock.assert_called_once_with(token='token')
 
-    @patch.object(eventlet.greenthread, 'spawn_n')
-    def test_continue(self, spawn_mock):
+    @patch.object(discoverd, 'process', autospec=True)
+    def test_continue(self, process_mock):
+        process_mock.return_value = [42]
         res = self.app.post('/v1/continue', data='"JSON"')
-        self.assertEqual(202, res.status_code)
-        spawn_mock.assert_called_once_with(discoverd.process, "JSON")
+        self.assertEqual(200, res.status_code)
+        process_mock.assert_called_once_with("JSON")
+        self.assertEqual(b'[42]', res.data)
+
+    @patch.object(discoverd, 'process', autospec=True)
+    def test_continue_failed(self, process_mock):
+        process_mock.side_effect = utils.DiscoveryFailed("boom")
+        res = self.app.post('/v1/continue', data='"JSON"')
+        self.assertEqual(400, res.status_code)
+        process_mock.assert_called_once_with("JSON")
+        self.assertEqual(b'boom', res.data)
 
 
 @patch.object(client.requests, 'post', autospec=True)
@@ -531,8 +568,8 @@ class TestNodeCachePop(BaseTest):
                                        mac=self.macs)
 
     def test_no_data(self):
-        self.assertIsNone(node_cache.pop_node())
-        self.assertIsNone(node_cache.pop_node(mac=[]))
+        self.assertRaises(utils.DiscoveryFailed, node_cache.pop_node)
+        self.assertRaises(utils.DiscoveryFailed, node_cache.pop_node, mac=[])
 
     def test_bmc(self):
         res = node_cache.pop_node(bmc_address='1.2.3.4')
@@ -547,14 +584,14 @@
                              "select * from attributes").fetchall())
 
     def test_macs_not_found(self):
-        res = node_cache.pop_node(mac=['11:22:33:33:33:33',
-                                       '66:66:44:33:22:11'])
-        self.assertIsNone(res)
+        self.assertRaises(utils.DiscoveryFailed, node_cache.pop_node,
+                          mac=['11:22:33:33:33:33',
+                               '66:66:44:33:22:11'])
 
     def test_macs_multiple_found(self):
         node_cache.add_node('uuid2', mac=self.macs2)
-        res = node_cache.pop_node(mac=[self.macs[0], self.macs2[0]])
-        self.assertIsNone(res)
+        self.assertRaises(utils.DiscoveryFailed, node_cache.pop_node,
+                          mac=[self.macs[0], self.macs2[0]])
 
     def test_both(self):
         res = node_cache.pop_node(bmc_address='1.2.3.4',
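
Usage note (illustrative, not part of the patch): since ``/v1/continue`` now
answers synchronously with the status codes documented in the README hunk
above, a ramdisk-side caller can act on the response directly instead of
posting fire-and-forget. A minimal sketch, assuming the default discoverd
address ``http://127.0.0.1:5050`` and a hypothetical ``post_discovery_data``
helper::

    # Sketch only: the URL, port and helper name are assumptions made for
    # illustration; they are not defined by ironic-discoverd itself.
    import json

    import requests

    DISCOVERD_URL = 'http://127.0.0.1:5050/v1/continue'


    def post_discovery_data(node_info):
        """Post collected hardware facts and return the parsed response."""
        resp = requests.post(DISCOVERD_URL,
                             data=json.dumps(node_info),
                             headers={'Content-Type': 'application/json'})
        if resp.status_code == 200:
            return resp.json()
        # Per the README above: 400 - bad request, 403 - node is not on
        # discovery, 404 - node not found or multiple nodes matched.
        raise RuntimeError('discovery failed with HTTP %d: %s'
                           % (resp.status_code, resp.text))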