From a326b03339123d79e71edfaca3215def3756b9ae Mon Sep 17 00:00:00 2001
From: Takashi NATSUME
Date: Fri, 8 Dec 2017 07:53:53 +0900
Subject: [PATCH] [placement] Add sending global request ID in delete (3)

Add the 'X-Openstack-Request-Id' header
to DELETE requests.
The header is added when deleting
resource provider inventories.

Subsequent patches will add the header in the other cases.

Change-Id: I1dac3d340fe7077095d68f803cf5335ffd5b3364
Partial-Bug: #1734625
---
 nova/compute/resource_tracker.py               |  3 +-
 nova/scheduler/client/__init__.py              |  7 ++--
 nova/scheduler/client/report.py                | 15 +++++----
 .../openstack/placement/test_report_client.py  |  7 ++--
 .../unit/compute/test_resource_tracker.py      |  5 +--
 .../unit/scheduler/client/test_report.py       | 33 +++++++++++--------
 nova/tests/unit/scheduler/test_client.py       |  9 +++--
 7 files changed, 49 insertions(+), 30 deletions(-)

diff --git a/nova/compute/resource_tracker.py b/nova/compute/resource_tracker.py
index 6768b5083ae1..2a8631741d4f 100644
--- a/nova/compute/resource_tracker.py
+++ b/nova/compute/resource_tracker.py
@@ -872,6 +872,7 @@ class ResourceTracker(object):
             inv_data = self.driver.get_inventory(nodename)
             _normalize_inventory_from_cn_obj(inv_data, compute_node)
             self.scheduler_client.set_inventory_for_provider(
+                context,
                 compute_node.uuid,
                 compute_node.hypervisor_hostname,
                 inv_data,
@@ -880,7 +881,7 @@ class ResourceTracker(object):
             # Eventually all virt drivers will return an inventory dict in the
             # format that the placement API expects and we'll be able to remove
             # this code branch
-            self.scheduler_client.update_compute_node(compute_node)
+            self.scheduler_client.update_compute_node(context, compute_node)
 
         if self.pci_tracker:
             self.pci_tracker.save(context)
diff --git a/nova/scheduler/client/__init__.py b/nova/scheduler/client/__init__.py
index d96e2d104be6..78808378d3e3 100644
--- a/nova/scheduler/client/__init__.py
+++ b/nova/scheduler/client/__init__.py
@@ -58,17 +58,18 @@ class SchedulerClient(object):
     def delete_aggregate(self, context, aggregate):
         self.queryclient.delete_aggregate(context, aggregate)
 
-    def set_inventory_for_provider(self, rp_uuid, rp_name, inv_data,
+    def set_inventory_for_provider(self, context, rp_uuid, rp_name, inv_data,
                                    parent_provider_uuid=None):
         self.reportclient.set_inventory_for_provider(
+            context,
             rp_uuid,
             rp_name,
             inv_data,
             parent_provider_uuid=parent_provider_uuid,
         )
 
-    def update_compute_node(self, compute_node):
-        self.reportclient.update_compute_node(compute_node)
+    def update_compute_node(self, context, compute_node):
+        self.reportclient.update_compute_node(context, compute_node)
 
     def update_instance_info(self, context, host_name, instance_info):
         self.queryclient.update_instance_info(context, host_name,
diff --git a/nova/scheduler/client/report.py b/nova/scheduler/client/report.py
index 38c8affe9515..464af3c4ab20 100644
--- a/nova/scheduler/client/report.py
+++ b/nova/scheduler/client/report.py
@@ -872,7 +872,7 @@ class SchedulerReportClient(object):
         return False
 
     @safe_connect
-    def _delete_inventory(self, rp_uuid):
+    def _delete_inventory(self, context, rp_uuid):
         """Deletes all inventory records for a resource provider with the
         supplied UUID.
 
@@ -896,7 +896,8 @@ class SchedulerReportClient(object):
         cur_gen = curr['resource_provider_generation']
 
         url = '/resource_providers/%s/inventories' % rp_uuid
-        r = self.delete(url, version="1.5")
+        r = self.delete(url, version="1.5",
+                        global_request_id=context.global_id)
         placement_req_id = get_placement_request_id(r)
         msg_args = {
             'rp_uuid': rp_uuid,
@@ -958,11 +959,12 @@ class SchedulerReportClient(object):
             msg_args['err'] = r.text
             LOG.error(msg, msg_args)
 
-    def set_inventory_for_provider(self, rp_uuid, rp_name, inv_data,
+    def set_inventory_for_provider(self, context, rp_uuid, rp_name, inv_data,
                                    parent_provider_uuid=None):
         """Given the UUID of a provider, set the inventory records for the
         provider to the supplied dict of resources.
 
+        :param context: The security context
         :param rp_uuid: UUID of the resource provider to set inventory for
         :param rp_name: Name of the resource provider in case we need to
                         create a record for it in the placement API
@@ -987,7 +989,7 @@ class SchedulerReportClient(object):
         if inv_data:
             self._update_inventory(rp_uuid, inv_data)
         else:
-            self._delete_inventory(rp_uuid)
+            self._delete_inventory(context, rp_uuid)
 
     @safe_connect
     def _ensure_traits(self, traits):
@@ -1197,9 +1199,10 @@ class SchedulerReportClient(object):
             LOG.error(msg, args)
             raise exception.InvalidResourceClass(resource_class=name)
 
-    def update_compute_node(self, compute_node):
+    def update_compute_node(self, context, compute_node):
         """Creates or updates stats for the supplied compute node.
 
+        :param context: The security context
         :param compute_node: updated nova.objects.ComputeNode to report
         :raises `exception.InventoryInUse` if the compute node has had changes
             to its inventory but there are still active allocations for
@@ -1212,7 +1215,7 @@ class SchedulerReportClient(object):
         if inv_data:
             self._update_inventory(compute_node.uuid, inv_data)
         else:
-            self._delete_inventory(compute_node.uuid)
+            self._delete_inventory(context, compute_node.uuid)
 
     @safe_connect
     def get_allocations_for_consumer(self, consumer):
diff --git a/nova/tests/functional/api/openstack/placement/test_report_client.py b/nova/tests/functional/api/openstack/placement/test_report_client.py
index 8810850d7d90..9e103ca00f0d 100644
--- a/nova/tests/functional/api/openstack/placement/test_report_client.py
+++ b/nova/tests/functional/api/openstack/placement/test_report_client.py
@@ -110,7 +110,7 @@ class SchedulerReportClientTests(test.TestCase):
         self.assertEqual([], rps)
 
         # Now let's update status for our compute node.
-        self.client.update_compute_node(self.compute_node)
+        self.client.update_compute_node(self.context, self.compute_node)
 
         # So now we have a resource provider
         rp = self.client._get_resource_provider(self.compute_uuid)
@@ -171,7 +171,7 @@ class SchedulerReportClientTests(test.TestCase):
         self.compute_node.vcpus = 0
         self.compute_node.memory_mb = 0
         self.compute_node.local_gb = 0
-        self.client.update_compute_node(self.compute_node)
+        self.client.update_compute_node(self.context, self.compute_node)
 
         # Check there's no more inventory records
         resp = self.client.get(inventory_url)
@@ -192,7 +192,8 @@ class SchedulerReportClientTests(test.TestCase):
         }
         self.assertRaises(exception.InvalidResourceClass,
                           self.client.set_inventory_for_provider,
-                          self.compute_uuid, self.compute_name, inv_data)
+                          self.context, self.compute_uuid,
+                          self.compute_name, inv_data)
 
     @mock.patch('keystoneauth1.session.Session.get_endpoint',
                 return_value='http://localhost:80/placement')
diff --git a/nova/tests/unit/compute/test_resource_tracker.py b/nova/tests/unit/compute/test_resource_tracker.py
index 8de185ac5547..94c9e8d493d9 100644
--- a/nova/tests/unit/compute/test_resource_tracker.py
+++ b/nova/tests/unit/compute/test_resource_tracker.py
@@ -1201,7 +1201,7 @@ class TestUpdateComputeNode(BaseTestCase):
         # implemented.
         self.driver_mock.get_inventory.assert_called_once_with(_NODENAME)
         ucn_mock = self.sched_client_mock.update_compute_node
-        ucn_mock.assert_called_once_with(new_compute)
+        ucn_mock.assert_called_once_with(mock.sentinel.ctx, new_compute)
 
     @mock.patch('nova.objects.ComputeNode.save')
     def test_existing_compute_node_updated_diff_updated_at(self, save_mock):
@@ -1249,7 +1249,7 @@ class TestUpdateComputeNode(BaseTestCase):
         # _normalize_inventory_from_cn_obj but call update_compute_node().
         self.assertFalse(norm_mock.called)
         ucn_mock = self.sched_client_mock.update_compute_node
-        ucn_mock.assert_called_once_with(mock.sentinel.ctx, new_compute)
 
     @mock.patch('nova.compute.resource_tracker.'
                '_normalize_inventory_from_cn_obj')
@@ -1282,6 +1282,7 @@ class TestUpdateComputeNode(BaseTestCase):
         self.rt._update(mock.sentinel.ctx, new_compute)
         save_mock.assert_called_once_with()
         sifp_mock.assert_called_once_with(
+            mock.sentinel.ctx,
             new_compute.uuid,
             new_compute.hypervisor_hostname,
             mock.sentinel.inv_data,
diff --git a/nova/tests/unit/scheduler/client/test_report.py b/nova/tests/unit/scheduler/client/test_report.py
index 3b1d9c389c7f..e28cfd5a64ca 100644
--- a/nova/tests/unit/scheduler/client/test_report.py
+++ b/nova/tests/unit/scheduler/client/test_report.py
@@ -2263,7 +2263,7 @@ class TestInventory(SchedulerReportClientTestCase):
                 '_update_inventory')
     def test_update_compute_node(self, mock_ui, mock_delete, mock_erp):
         cn = self.compute_node
-        self.client.update_compute_node(cn)
+        self.client.update_compute_node(self.context, cn)
         mock_erp.assert_called_once_with(cn.uuid, cn.hypervisor_hostname)
         expected_inv_data = {
             'VCPU': {
@@ -2312,9 +2312,9 @@ class TestInventory(SchedulerReportClientTestCase):
         cn.vcpus = 0
         cn.memory_mb = 0
         cn.local_gb = 0
-        self.client.update_compute_node(cn)
+        self.client.update_compute_node(self.context, cn)
         mock_erp.assert_called_once_with(cn.uuid, cn.hypervisor_hostname)
-        mock_delete.assert_called_once_with(cn.uuid)
+        mock_delete.assert_called_once_with(self.context, cn.uuid)
         self.assertFalse(mock_ui.called)
 
     @mock.patch.object(report.LOG, 'info')
@@ -2340,11 +2340,14 @@ class TestInventory(SchedulerReportClientTestCase):
         mock_delete.return_value.status_code = 204
         mock_delete.return_value.headers = {'x-openstack-request-id':
                                             uuids.request_id}
-        result = self.client._delete_inventory(cn.uuid)
+        result = self.client._delete_inventory(self.context, cn.uuid)
         self.assertIsNone(result)
         self.assertFalse(mock_put.called)
         self.assertEqual(uuids.request_id,
                          mock_info.call_args[0][1]['placement_req_id'])
+        mock_delete.assert_called_once_with(
+            '/resource_providers/%s/inventories' % cn.uuid,
+            version='1.5', global_request_id=self.context.global_id)
 
     @mock.patch('nova.scheduler.client.report._extract_inventory_in_use')
     @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
@@ -2362,7 +2365,7 @@ class TestInventory(SchedulerReportClientTestCase):
             'inventories': {
             }
         }
-        result = self.client._delete_inventory(cn.uuid)
+        result = self.client._delete_inventory(self.context, cn.uuid)
         self.assertIsNone(result)
         self.assertFalse(mock_delete.called)
         self.assertFalse(mock_extract.called)
@@ -2397,7 +2400,7 @@ class TestInventory(SchedulerReportClientTestCase):
         }
         mock_put.return_value.headers = {'x-openstack-request-id':
                                          uuids.request_id}
-        result = self.client._delete_inventory(cn.uuid)
+        result = self.client._delete_inventory(self.context, cn.uuid)
         self.assertIsNone(result)
         self.assertTrue(mock_debug.called)
         self.assertTrue(mock_put.called)
@@ -2427,7 +2430,7 @@ class TestInventory(SchedulerReportClientTestCase):
         }
         mock_delete.return_value.status_code = 406
         mock_put.return_value.status_code = 200
-        self.client._delete_inventory(cn.uuid)
+        self.client._delete_inventory(self.context, cn.uuid)
         self.assertTrue(mock_debug.called)
         exp_url = '/resource_providers/%s/inventories' % cn.uuid
         payload = {
@@ -2469,7 +2472,7 @@ class TestInventory(SchedulerReportClientTestCase):
         }
         mock_put.return_value.headers = {'x-openstack-request-id':
                                          uuids.request_id}
-        self.client._delete_inventory(cn.uuid)
+        self.client._delete_inventory(self.context, cn.uuid)
         self.assertTrue(mock_debug.called)
         exp_url = '/resource_providers/%s/inventories' % cn.uuid
         payload = {
@@ -2521,7 +2524,7 @@ There was a conflict when trying to complete your request.
             'inventories': {
             }
         }
-        result = self.client._delete_inventory(cn.uuid)
+        result = self.client._delete_inventory(self.context, cn.uuid)
         self.assertIsNone(result)
         self.assertTrue(mock_warn.called)
         self.assertEqual(uuids.request_id,
@@ -2556,7 +2559,7 @@ There was a conflict when trying to complete your request.
         mock_delete.return_value.status_code = 404
         mock_delete.return_value.headers = {'x-openstack-request-id':
                                             uuids.request_id}
-        result = self.client._delete_inventory(cn.uuid)
+        result = self.client._delete_inventory(self.context, cn.uuid)
         self.assertIsNone(result)
         self.assertFalse(self.client._provider_tree.exists(cn.uuid))
         self.assertTrue(mock_debug.called)
@@ -2596,7 +2599,7 @@ There was a conflict when trying to complete your request.
         }
         mock_delete.return_value.headers = {'x-openstack-request-id':
                                             uuids.request_id}
-        result = self.client._delete_inventory(cn.uuid)
+        result = self.client._delete_inventory(self.context, cn.uuid)
         self.assertIsNone(result)
         self.assertFalse(mock_warn.called)
         self.assertTrue(mock_error.called)
@@ -3070,6 +3073,7 @@ There was a conflict when trying to complete your request.
             },
         }
         self.client.set_inventory_for_provider(
+            self.context,
             mock.sentinel.rp_uuid,
             mock.sentinel.rp_name,
             inv_data,
@@ -3105,6 +3109,7 @@ There was a conflict when trying to complete your request.
         """
         inv_data = {}
         self.client.set_inventory_for_provider(
+            self.context,
             mock.sentinel.rp_uuid,
             mock.sentinel.rp_name,
             inv_data,
@@ -3117,7 +3122,7 @@ There was a conflict when trying to complete your request.
         self.assertFalse(mock_gocr.called)
         self.assertFalse(mock_erc.called)
         self.assertFalse(mock_upd.called)
-        mock_del.assert_called_once_with(mock.sentinel.rp_uuid)
+        mock_del.assert_called_once_with(self.context, mock.sentinel.rp_uuid)
 
     @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                 '_update_inventory')
@@ -3171,6 +3176,7 @@ There was a conflict when trying to complete your request.
         }
         self.client.set_inventory_for_provider(
+            self.context,
             mock.sentinel.rp_uuid,
             mock.sentinel.rp_name,
             inv_data,
@@ -3197,7 +3203,8 @@ There was a conflict when trying to complete your request.
     def test_set_inventory_for_provider_with_parent(self, mock_erp):
         """Ensure parent UUID is sent through."""
         self.client.set_inventory_for_provider(
-            uuids.child, 'junior', {}, parent_provider_uuid=uuids.parent)
+            self.context, uuids.child, 'junior', {},
+            parent_provider_uuid=uuids.parent)
         mock_erp.assert_called_once_with(
             uuids.child, 'junior', parent_provider_uuid=uuids.parent)
 
diff --git a/nova/tests/unit/scheduler/test_client.py b/nova/tests/unit/scheduler/test_client.py
index 7e1169a4b46e..90b501e89170 100644
--- a/nova/tests/unit/scheduler/test_client.py
+++ b/nova/tests/unit/scheduler/test_client.py
@@ -99,20 +99,23 @@ class SchedulerClientTestCase(test.NoDBTestCase):
     def test_update_compute_node(self, mock_update_compute_node):
         self.assertIsNone(self.client.reportclient.instance)
 
-        self.client.update_compute_node(mock.sentinel.cn)
+        self.client.update_compute_node(mock.sentinel.ctx, mock.sentinel.cn)
         self.assertIsNotNone(self.client.reportclient.instance)
 
-        mock_update_compute_node.assert_called_once_with(mock.sentinel.cn)
+        mock_update_compute_node.assert_called_once_with(
+            mock.sentinel.ctx, mock.sentinel.cn)
 
     @mock.patch.object(scheduler_report_client.SchedulerReportClient,
                        'set_inventory_for_provider')
     def test_set_inventory_for_provider(self, mock_set):
         self.client.set_inventory_for_provider(
+            mock.sentinel.ctx,
             mock.sentinel.rp_uuid,
             mock.sentinel.rp_name,
             mock.sentinel.inv_data,
         )
         mock_set.assert_called_once_with(
+            mock.sentinel.ctx,
             mock.sentinel.rp_uuid,
             mock.sentinel.rp_name,
             mock.sentinel.inv_data,
@@ -121,12 +124,14 @@ class SchedulerClientTestCase(test.NoDBTestCase):
         # Pass the optional parent_provider_uuid
         mock_set.reset_mock()
         self.client.set_inventory_for_provider(
+            mock.sentinel.ctx,
             mock.sentinel.child_uuid,
             mock.sentinel.child_name,
             mock.sentinel.inv_data2,
             parent_provider_uuid=mock.sentinel.rp_uuid,
        )
         mock_set.assert_called_once_with(
+            mock.sentinel.ctx,
             mock.sentinel.child_uuid,
             mock.sentinel.child_name,
             mock.sentinel.inv_data2,
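
Below is a minimal, illustrative sketch (not nova code and not part of the
patch) of the request this change is meant to produce: a DELETE of a resource
provider's inventories that carries the caller's global request ID in the
'X-Openstack-Request-Id' header. The endpoint mirrors the one used in the
tests above; the token and request ID are made-up placeholders, and the real
code path goes through SchedulerReportClient.delete() with
global_request_id=context.global_id rather than calling requests directly.

import requests

PLACEMENT_ENDPOINT = 'http://localhost:80/placement'  # assumed endpoint


def delete_provider_inventories(rp_uuid, token, global_request_id):
    # Same URL and microversion as the report.py hunk above.
    url = '%s/resource_providers/%s/inventories' % (PLACEMENT_ENDPOINT, rp_uuid)
    headers = {
        'X-Auth-Token': token,                        # keystone token (placeholder)
        'OpenStack-API-Version': 'placement 1.5',     # microversion 1.5, as in the patch
        'X-Openstack-Request-Id': global_request_id,  # e.g. context.global_id
    }
    return requests.delete(url, headers=headers)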