Allow extending an existing instance allocation

This patch adds a new function to the SchedulerReportClient that allows
extending an existing instance allocation with extra resources.

Change-Id: I1bdc5e4971204fbfbf0dfcd232cabf2cfe02a966
Blueprint: support-interface-attach-with-qos-ports
Balazs Gibizer 2020-10-09 14:33:49 +02:00
parent 672b288324
commit 54865a17cd
2 changed files with 391 additions and 0 deletions
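
Illustrative usage (editor's sketch, not part of this commit): how a caller,
e.g. the interface-attach flow targeted by the blueprint, might invoke the
new method. The reportclient, context, instance, and amounts below are
assumed names, not code from this diff.

    # Extend the instance's allocation with the bandwidth resources of a
    # newly attached port. Assumes a SchedulerReportClient instance
    # (reportclient) and a RequestContext (context) from surrounding code.
    resources = {
        rp_uuid: {
            'resources': {
                'NET_BW_EGR_KILOBIT_PER_SEC': 1000,
                'NET_BW_IGR_KILOBIT_PER_SEC': 1000,
            },
        },
    }
    # An empty dict is a no-op; AllocationUpdateFailed is raised if
    # placement keeps reporting generation conflicts after all retries.
    reportclient.add_resources_to_instance_allocation(
        context, instance.uuid, resources)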

nova/scheduler/client/report.py

@@ -1622,6 +1622,99 @@ class SchedulerReportClient(object):
                raise Retry('claim_resources', reason)

        return r.status_code == 204

    def add_resources_to_instance_allocation(
        self,
        context: nova_context.RequestContext,
        consumer_uuid: str,
        resources: ty.Dict[str, ty.Dict[str, ty.Dict[str, int]]],
    ) -> None:
        """Adds certain resources to the current allocation of the
        consumer.

        :param context: the request context
        :param consumer_uuid: the uuid of the consumer to update
        :param resources: a dict of resources in the format of allocation
            request. E.g.:
            {
                <rp_uuid>: {
                    'resources': {
                        <resource class>: amount,
                        <other resource class>: amount
                    }
                },
                <other rp_uuid>: {
                    'resources': {
                        <other resource class>: amount
                    }
                }
            }
        :raises AllocationUpdateFailed: if there were multiple generation
            conflicts and we ran out of retries.
        :raises ConsumerAllocationRetrievalFailed: if the current allocation
            cannot be read from placement.
        :raises: keystoneauth1.exceptions.base.ClientException on failure to
            communicate with the placement API
        """
        # TODO(gibi): Refactor remove_resources_from_instance_allocation() to
        # also take the same structure for the resources parameter
        if not resources:
            # nothing to do
            return

        # This either raises on error, or returns False if we run out of
        # retries due to conflict. Convert that return value to an exception
        # too.
        if not self._add_resources_to_instance_allocation(
                context, consumer_uuid, resources):
            error_reason = _(
                "Cannot add resources %s to the allocation due to multiple "
                "successive generation conflicts in placement.")
            raise exception.AllocationUpdateFailed(
                consumer_uuid=consumer_uuid,
                error=error_reason % resources)

    @retries
    def _add_resources_to_instance_allocation(
        self,
        context: nova_context.RequestContext,
        consumer_uuid: str,
        resources: ty.Dict[str, ty.Dict[str, ty.Dict[str, int]]],
    ) -> bool:
        current_allocs = self.get_allocs_for_consumer(context, consumer_uuid)

        for rp_uuid in resources:
            if rp_uuid not in current_allocs['allocations']:
                current_allocs['allocations'][rp_uuid] = {'resources': {}}
            alloc_on_rp = current_allocs['allocations'][rp_uuid]['resources']
            for rc, amount in resources[rp_uuid]['resources'].items():
                if rc in alloc_on_rp:
                    alloc_on_rp[rc] += amount
                else:
                    alloc_on_rp[rc] = amount

        r = self._put_allocations(context, consumer_uuid, current_allocs)
        if r.status_code != 204:
            err = r.json()['errors'][0]
            if err['code'] == 'placement.concurrent_update':
                reason = (
                    "another process changed the resource providers or the "
                    "consumer involved in our attempt to update allocations "
                    "for consumer %s so we cannot add resources %s to the "
                    "current allocation %s" %
                    (consumer_uuid, resources, current_allocs))
                raise Retry(
                    '_add_resources_to_instance_allocation', reason)
            raise exception.AllocationUpdateFailed(
                consumer_uuid=consumer_uuid, error=err['detail'])

        return True

    def remove_resources_from_instance_allocation(
            self, context, consumer_uuid, resources):
        """Removes certain resources from the current allocation of the
nova/tests/unit/scheduler/client/test_report.py

@@ -3821,6 +3821,304 @@ class TestAllocations(SchedulerReportClientTestCase):
        mock_put.assert_has_calls([put_call] * 4)

    def _test_add_res_to_alloc(
            self, current_allocations, resources_to_add, updated_allocations):
        with test.nested(
            mock.patch.object(self.client, 'get'),
            mock.patch.object(self.client, 'put'),
        ) as (mock_get, mock_put):
            mock_get.return_value = fake_requests.FakeResponse(
                200, content=jsonutils.dumps(current_allocations))
            mock_put.return_value = fake_requests.FakeResponse(204)

            self.client.add_resources_to_instance_allocation(
                self.context, uuids.consumer_uuid, resources_to_add)

            mock_get.assert_called_once_with(
                '/allocations/%s' % uuids.consumer_uuid, version='1.28',
                global_request_id=self.context.global_id)
            mock_put.assert_called_once_with(
                '/allocations/%s' % uuids.consumer_uuid, updated_allocations,
                version='1.28', global_request_id=self.context.global_id)

    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.put")
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.get")
    def test_add_res_to_alloc_empty_addition(self, mock_get, mock_put):
        self.client.add_resources_to_instance_allocation(
            self.context, uuids.consumer_uuid, {})

        mock_get.assert_not_called()
        mock_put.assert_not_called()

    def test_add_res_to_alloc(self):
        current_allocation = {
            "allocations": {
                uuids.rp1: {
                    "generation": 42,
                    "resources": {
                        'NET_BW_EGR_KILOBIT_PER_SEC': 200,
                    },
                },
            },
            "consumer_generation": 2,
            "project_id": uuids.project_id,
            "user_id": uuids.user_id,
        }
        addition = {
            uuids.rp1: {
                "resources": {
                    "FOO": 1,  # existing RP but new resource class
                    "NET_BW_EGR_KILOBIT_PER_SEC": 100,  # existing RP and rc
                },
            },
            uuids.rp2: {  # new RP
                "resources": {
                    "BAR": 1,
                },
            },
        }
        expected_allocation = {
            "allocations": {
                uuids.rp1: {
                    "generation": 42,
                    "resources": {
                        "FOO": 1,
                        "NET_BW_EGR_KILOBIT_PER_SEC": 200 + 100,
                    },
                },
                uuids.rp2: {
                    "resources": {
                        "BAR": 1,
                    },
                },
            },
            "consumer_generation": 2,
            "project_id": uuids.project_id,
            "user_id": uuids.user_id,
        }

        self._test_add_res_to_alloc(
            current_allocation, addition, expected_allocation)

    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.get")
    def test_add_res_to_alloc_failed_to_get_alloc(self, mock_get):
        mock_get.side_effect = ks_exc.EndpointNotFound()
        addition = {
            uuids.rp1: {
                "resources": {
                    "NET_BW_EGR_KILOBIT_PER_SEC": 200,
                    "NET_BW_IGR_KILOBIT_PER_SEC": 200,
                }
            }
        }

        self.assertRaises(
            ks_exc.ClientException,
            self.client.add_resources_to_instance_allocation,
            self.context, uuids.consumer_uuid, addition)

    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.put")
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.get")
    def test_add_res_to_alloc_failed_to_put_alloc_non_conflict(
            self, mock_get, mock_put):
        current_allocations = {
            "allocations": {
                uuids.rp1: {
                    "generation": 42,
                    "resources": {
                        'NET_BW_EGR_KILOBIT_PER_SEC': 200,
                    },
                },
            },
            "consumer_generation": 2,
            "project_id": uuids.project_id,
            "user_id": uuids.user_id,
        }
        mock_get.side_effect = [
            fake_requests.FakeResponse(
                200, content=jsonutils.dumps(current_allocations)),
        ]
        addition = {
            uuids.rp1: {
                "resources": {
                    "NET_BW_EGR_KILOBIT_PER_SEC": 200,
                    "NET_BW_IGR_KILOBIT_PER_SEC": 200,
                }
            }
        }
        mock_put.side_effect = [
            fake_requests.FakeResponse(
                404,
                content=jsonutils.dumps(
                    {'errors': [
                        {'code': 'placement.undefined_code', 'detail': ''}]}))
        ]

        self.assertRaises(
            exception.AllocationUpdateFailed,
            self.client.add_resources_to_instance_allocation,
            self.context, uuids.consumer_uuid, addition)

    @mock.patch('time.sleep', new=mock.Mock())
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.put")
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.get")
    def test_add_res_to_alloc_retry_succeed(self, mock_get, mock_put):
        current_allocations = {
            "allocations": {
                uuids.rp1: {
                    "generation": 42,
                    "resources": {
                        'NET_BW_EGR_KILOBIT_PER_SEC': 200,
                    },
                },
            },
            "consumer_generation": 2,
            "project_id": uuids.project_id,
            "user_id": uuids.user_id,
        }
        current_allocations_2 = copy.deepcopy(current_allocations)
        current_allocations_2['consumer_generation'] = 3
        addition = {
            uuids.rp1: {
                "resources": {
                    "NET_BW_EGR_KILOBIT_PER_SEC": 100,
                }
            }
        }
        updated_allocations = {
            "allocations": {
                uuids.rp1: {
                    "generation": 42,
                    "resources": {
                        'NET_BW_EGR_KILOBIT_PER_SEC': 200 + 100,
                    },
                },
            },
            "consumer_generation": 2,
            "project_id": uuids.project_id,
            "user_id": uuids.user_id,
        }
        updated_allocations_2 = copy.deepcopy(updated_allocations)
        updated_allocations_2['consumer_generation'] = 3
        mock_get.side_effect = [
            fake_requests.FakeResponse(
                200, content=jsonutils.dumps(current_allocations)),
            fake_requests.FakeResponse(
                200, content=jsonutils.dumps(current_allocations_2))
        ]
        mock_put.side_effect = [
            fake_requests.FakeResponse(
                status_code=409,
                content=jsonutils.dumps(
                    {'errors': [{'code': 'placement.concurrent_update',
                                 'detail': ''}]})),
            fake_requests.FakeResponse(
                status_code=204)
        ]

        self.client.add_resources_to_instance_allocation(
            self.context, uuids.consumer_uuid, addition)

        self.assertEqual(
            [
                mock.call(
                    '/allocations/%s' % uuids.consumer_uuid,
                    version='1.28',
                    global_request_id=self.context.global_id),
                mock.call(
                    '/allocations/%s' % uuids.consumer_uuid,
                    version='1.28',
                    global_request_id=self.context.global_id)
            ],
            mock_get.mock_calls)
        self.assertEqual(
            [
                mock.call(
                    '/allocations/%s' % uuids.consumer_uuid,
                    updated_allocations, version='1.28',
                    global_request_id=self.context.global_id),
                mock.call(
                    '/allocations/%s' % uuids.consumer_uuid,
                    updated_allocations_2, version='1.28',
                    global_request_id=self.context.global_id),
            ],
            mock_put.mock_calls)

    @mock.patch('time.sleep', new=mock.Mock())
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.put")
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient.get")
    def test_add_res_to_alloc_run_out_of_retries(self, mock_get, mock_put):
        current_allocations = {
            "allocations": {
                uuids.rp1: {
                    "generation": 42,
                    "resources": {
                        'NET_BW_EGR_KILOBIT_PER_SEC': 200,
                    },
                },
            },
            "consumer_generation": 2,
            "project_id": uuids.project_id,
            "user_id": uuids.user_id,
        }
        addition = {
            uuids.rp1: {
                "resources": {
                    "NET_BW_EGR_KILOBIT_PER_SEC": 100,
                }
            }
        }
        updated_allocations = {
            "allocations": {
                uuids.rp1: {
                    "generation": 42,
                    "resources": {
                        'NET_BW_EGR_KILOBIT_PER_SEC': 200 + 100,
                    },
                },
            },
            "consumer_generation": 2,
            "project_id": uuids.project_id,
            "user_id": uuids.user_id,
        }
        get_rsp = fake_requests.FakeResponse(
            200, content=jsonutils.dumps(current_allocations))
        mock_get.side_effect = [get_rsp] * 4
        put_rsp = fake_requests.FakeResponse(
            status_code=409,
            content=jsonutils.dumps(
                {'errors': [{'code': 'placement.concurrent_update',
                             'detail': ''}]}))
        mock_put.side_effect = [put_rsp] * 4

        ex = self.assertRaises(
            exception.AllocationUpdateFailed,
            self.client.add_resources_to_instance_allocation,
            self.context, uuids.consumer_uuid, addition)

        self.assertIn(
            'due to multiple successive generation conflicts',
            str(ex))
        get_call = mock.call(
            '/allocations/%s' % uuids.consumer_uuid, version='1.28',
            global_request_id=self.context.global_id)
        mock_get.assert_has_calls([get_call] * 4)
        put_call = mock.call(
            '/allocations/%s' % uuids.consumer_uuid, updated_allocations,
            version='1.28', global_request_id=self.context.global_id)
        mock_put.assert_has_calls([put_call] * 4)


class TestResourceClass(SchedulerReportClientTestCase):
    def setUp(self):
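
The tests above expect exactly four GET/PUT rounds before giving up, which
matches the @retries decorator on _add_resources_to_instance_allocation:
one initial attempt plus three retries, returning False when every attempt
raises Retry. A simplified sketch of such a decorator (the real one lives
in nova's report client; the jittered sleep here is an assumption):

    import functools
    import random
    import time

    class Retry(Exception):
        # Stand-in for the report client's Retry exception.
        pass

    def retries(f):
        # Re-invoke the wrapped method when it raises Retry, up to four
        # attempts in total; return False if every attempt conflicted.
        @functools.wraps(f)
        def wrapper(self, *args, **kwargs):
            for attempt in range(4):
                try:
                    # Back off a bit more on each pass (jitter assumed).
                    time.sleep(random.uniform(0, attempt * 2))
                    return f(self, *args, **kwargs)
                except Retry:
                    continue
            return False
        return wrapper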