Execute CrossCellMigrationTask from MigrationTask

This adds the code to check whether a cross-cell move
is allowed by the RequestSpec (set in the API) and, if
so, whether the scheduler selected a host in another
cell. If it did, CrossCellMigrationTask is executed.
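
In short, the new conductor-side dispatch looks like this
(a condensed excerpt of the nova/conductor/tasks/migrate.py
hunk below, with the info-level logging trimmed):

    selection = self._schedule()
    if not self._is_selected_host_in_source_cell(selection):
        # The scheduler picked a host in another cell, so hand the
        # migration off to CrossCellMigrationTask instead of calling
        # prep_resize on the selected compute directly.
        task = cross_cell_migrate.CrossCellMigrationTask(
            self.context, self.instance, self.flavor,
            self.request_spec, self._migration, self.compute_rpcapi,
            selection, self.host_list)
        task.execute()
        return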

Note that the _restrict_request_spec_to_cell method is
renamed to _set_requested_destination_cell and its
logging is adjusted, since we may no longer be
restricting the scheduler to hosts in the same cell
when cross-cell moves are allowed.
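
The renamed helper still records the instance's current cell
on the RequestSpec in both cases; the allow_cross_cell_move
flag only controls whether the scheduler treats that cell as
a hard restriction or merely a preference, and which message
gets logged. Roughly (condensed from the hunk below):

    cross_cell_allowed = (
        self.request_spec.requested_destination.allow_cross_cell_move)
    # The current cell is always recorded; with cross-cell allowed it
    # acts as a scheduler preference rather than a restriction.
    self.request_spec.requested_destination.cell = (
        instance_mapping.cell_mapping)
    if cross_cell_allowed:
        LOG.debug('Allowing migration from cell %(cell)s', ...)
    else:
        LOG.debug('Restricting to cell %(cell)s while migrating', ...)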

Part of blueprint cross-cell-resize

Change-Id: Ibaa31fc8079c16e169472372c05dc48ff0602515
Matt Riedemann 2019-02-07 17:04:58 -05:00
parent accdc16947
commit 425518d198
4 changed files with 168 additions and 35 deletions


@@ -16,6 +16,7 @@ from oslo_serialization import jsonutils
from nova import availability_zones
from nova.compute import utils as compute_utils
from nova.conductor.tasks import base
from nova.conductor.tasks import cross_cell_migrate
from nova import exception
from nova.i18n import _
from nova import objects
@@ -161,29 +162,60 @@ class MigrationTask(base.TaskBase):
return migration
def _restrict_request_spec_to_cell(self, legacy_props):
# NOTE(danms): Right now we only support migrate to the same
# cell as the current instance, so request that the scheduler
# limit thusly.
def _set_requested_destination_cell(self, legacy_props):
instance_mapping = objects.InstanceMapping.get_by_instance_uuid(
self.context, self.instance.uuid)
LOG.debug('Requesting cell %(cell)s while migrating',
{'cell': instance_mapping.cell_mapping.identity},
instance=self.instance)
if ('requested_destination' in self.request_spec and
if not ('requested_destination' in self.request_spec and
self.request_spec.requested_destination):
self.request_spec.requested_destination.cell = (
instance_mapping.cell_mapping)
# NOTE(takashin): In the case that the target host is specified,
# if the migration is failed, it is not necessary to retry
# the cold migration to the same host. So make sure that
# reschedule will not occur.
if 'host' in self.request_spec.requested_destination:
legacy_props.pop('retry', None)
self.request_spec.retry = None
self.request_spec.requested_destination = objects.Destination()
targeted = 'host' in self.request_spec.requested_destination
# NOTE(mriedem): If the user is allowed to perform a cross-cell resize
# then add the current cell to the request spec as "preferred" so the
# scheduler will (by default) weigh hosts within the current cell over
# hosts in another cell, all other things being equal. If the user is
# not allowed to perform cross-cell resize, then we limit the request
# spec and tell the scheduler to only look at hosts in the current
# cell.
cross_cell_allowed = (
self.request_spec.requested_destination.allow_cross_cell_move)
self.request_spec.requested_destination.cell = (
instance_mapping.cell_mapping)
# NOTE(takashin): In the case that the target host is specified,
# if the migration is failed, it is not necessary to retry
# the cold migration to the same host. So make sure that
# reschedule will not occur.
if targeted:
legacy_props.pop('retry', None)
self.request_spec.retry = None
# Log our plan before calling the scheduler.
if cross_cell_allowed:
LOG.debug('Allowing migration from cell %(cell)s',
{'cell': instance_mapping.cell_mapping.identity},
instance=self.instance)
else:
self.request_spec.requested_destination = objects.Destination(
cell=instance_mapping.cell_mapping)
LOG.debug('Restricting to cell %(cell)s while migrating',
{'cell': instance_mapping.cell_mapping.identity},
instance=self.instance)
def _is_selected_host_in_source_cell(self, selection):
"""Checks if the given Selection is in the same cell as the instance
:param selection: Selection object returned from the scheduler
``select_destinations`` method.
:returns: True if the host Selection is in the same cell as the
instance, False otherwise.
"""
# Note that the context is already targeted to the current cell in
# which the instance exists.
same_cell = selection.cell_uuid == self.context.cell_uuid
if not same_cell:
LOG.debug('Selected target host %s is in cell %s and instance is '
'in cell: %s', selection.service_host,
selection.cell_uuid, self.context.cell_uuid,
instance=self.instance)
return same_cell
def _support_resource_request(self, selection):
"""Returns true if the host is new enough to support resource request
@@ -312,7 +344,7 @@ class MigrationTask(base.TaskBase):
# resource requests in a single list and add them to the RequestSpec.
self.request_spec.requested_resources = port_res_req
self._restrict_request_spec_to_cell(legacy_props)
self._set_requested_destination_cell(legacy_props)
# Once _preallocate_migration() is done, the source node allocation is
# moved from the instance consumer to the migration record consumer,
@@ -340,7 +372,18 @@
# pass the remaining alternates to the compute.
if self.host_list is None:
selection = self._schedule()
if not self._is_selected_host_in_source_cell(selection):
# If the selected host is in another cell, we need to execute
# another task to do the cross-cell migration.
LOG.info('Executing cross-cell resize task starting with '
'target host: %s', selection.service_host,
instance=self.instance)
task = cross_cell_migrate.CrossCellMigrationTask(
self.context, self.instance, self.flavor,
self.request_spec, self._migration, self.compute_rpcapi,
selection, self.host_list)
task.execute()
return
else:
# This is a reschedule that will use the supplied alternate hosts
# in the host_list as destinations.


@@ -674,6 +674,7 @@ class _ServersActionsJsonTestMixin(object):
class ServersActionsJsonTest(ServersSampleBase, _ServersActionsJsonTestMixin):
SUPPORTS_CELLS = True
def test_server_reboot_hard(self):
uuid = self._post_server()


@@ -34,6 +34,10 @@ class MigrationTaskTestCase(test.NoDBTestCase):
self.user_id = 'fake'
self.project_id = 'fake'
self.context = FakeContext(self.user_id, self.project_id)
# Normally RequestContext.cell_uuid would be set when targeting
# the context in nova.conductor.manager.targets_cell but we just
# fake it here.
self.context.cell_uuid = uuids.cell1
self.flavor = fake_flavor.fake_flavor_obj(self.context)
self.flavor.extra_specs = {'extra_specs': 'fake'}
inst = fake_instance.fake_db_instance(image_ref='image_ref',
@@ -83,9 +87,12 @@ class MigrationTaskTestCase(test.NoDBTestCase):
@mock.patch.object(scheduler_utils, 'setup_instance_group')
@mock.patch.object(query.SchedulerQueryClient, 'select_destinations')
@mock.patch.object(compute_rpcapi.ComputeAPI, 'prep_resize')
def _test_execute(self, prep_resize_mock, sel_dest_mock, sig_mock, az_mock,
gmv_mock, cm_mock, sm_mock, cn_mock, rc_mock, gbf_mock,
requested_destination=False):
@mock.patch('nova.conductor.tasks.cross_cell_migrate.'
'CrossCellMigrationTask.execute')
def _test_execute(self, cross_cell_exec_mock, prep_resize_mock,
sel_dest_mock, sig_mock, az_mock, gmv_mock, cm_mock,
sm_mock, cn_mock, rc_mock, gbf_mock,
requested_destination=False, same_cell=True):
sel_dest_mock.return_value = self.host_lists
az_mock.return_value = 'myaz'
gbf_mock.return_value = objects.MigrationList()
@@ -95,7 +102,8 @@ class MigrationTaskTestCase(test.NoDBTestCase):
if requested_destination:
self.request_spec.requested_destination = objects.Destination(
host='target_host', node=None)
host='target_host', node=None,
allow_cross_cell_move=not same_cell)
self.request_spec.retry = objects.SchedulerRetries.from_dict(
self.context, self.filter_properties['retry'])
self.filter_properties.pop('retry')
@@ -117,11 +125,16 @@ class MigrationTaskTestCase(test.NoDBTestCase):
cn_mock.side_effect = set_migration_uuid
selection = self.host_lists[0][0]
with mock.patch.object(scheduler_utils,
'fill_provider_mapping') as fill_mock:
with test.nested(
mock.patch.object(scheduler_utils,
'fill_provider_mapping'),
mock.patch.object(task, '_is_selected_host_in_source_cell',
return_value=same_cell)
) as (fill_mock, _is_source_cell_mock):
task.execute()
fill_mock.assert_called_once_with(
task.context, task.reportclient, task.request_spec, selection)
_is_source_cell_mock.assert_called_once_with(selection)
self.ensure_network_metadata_mock.assert_called_once_with(
self.instance)
@@ -131,13 +144,22 @@ class MigrationTaskTestCase(test.NoDBTestCase):
task.query_client.select_destinations.assert_called_once_with(
self.context, self.request_spec, [self.instance.uuid],
return_objects=True, return_alternates=True)
prep_resize_mock.assert_called_once_with(
self.context, self.instance, self.request_spec.image,
self.flavor, selection.service_host, task._migration,
request_spec=self.request_spec,
filter_properties=self.filter_properties, node=selection.nodename,
clean_shutdown=self.clean_shutdown, host_list=[])
az_mock.assert_called_once_with(self.context, 'host1')
if same_cell:
prep_resize_mock.assert_called_once_with(
self.context, self.instance, self.request_spec.image,
self.flavor, selection.service_host, task._migration,
request_spec=self.request_spec,
filter_properties=self.filter_properties,
node=selection.nodename,
clean_shutdown=self.clean_shutdown, host_list=[])
az_mock.assert_called_once_with(self.context, 'host1')
cross_cell_exec_mock.assert_not_called()
else:
cross_cell_exec_mock.assert_called_once_with()
az_mock.assert_not_called()
prep_resize_mock.assert_not_called()
self.assertIsNotNone(task._migration)
old_flavor = self.instance.flavor
@@ -159,6 +181,9 @@ class MigrationTaskTestCase(test.NoDBTestCase):
self.assertIsNone(self.request_spec.retry)
self.assertIn('cell', self.request_spec.requested_destination)
self.assertIsNotNone(self.request_spec.requested_destination.cell)
self.assertEqual(
not same_cell,
self.request_spec.requested_destination.allow_cross_cell_move)
mock_get_resources.assert_called_once_with(
self.context, self.instance.uuid)
@@ -175,6 +200,13 @@ class MigrationTaskTestCase(test.NoDBTestCase):
self.flavor.id = 3
self._test_execute()
def test_execute_same_cell_false(self):
"""Tests the execute() scenario that the RequestSpec allows cross
cell move and the selected target host is in another cell so
CrossCellMigrationTask is executed.
"""
self._test_execute(same_cell=False)
@mock.patch.object(objects.MigrationList, 'get_by_filters')
@mock.patch('nova.conductor.tasks.migrate.revert_allocation_for_migration')
@mock.patch('nova.scheduler.client.report.SchedulerReportClient')
@@ -826,6 +858,60 @@ class MigrationTaskTestCase(test.NoDBTestCase):
instance=self.instance),
])
@mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid',
return_value=objects.InstanceMapping(
cell_mapping=objects.CellMapping(uuid=uuids.cell1)))
@mock.patch('nova.conductor.tasks.migrate.LOG.debug')
def test_set_requested_destination_cell_allow_cross_cell_resize_true(
self, mock_debug, mock_get_im):
"""Tests the scenario that the RequestSpec is configured for
allow_cross_cell_resize=True.
"""
task = self._generate_task()
legacy_props = self.request_spec.to_legacy_filter_properties_dict()
self.request_spec.requested_destination = objects.Destination(
allow_cross_cell_move=True)
task._set_requested_destination_cell(legacy_props)
mock_get_im.assert_called_once_with(self.context, self.instance.uuid)
mock_debug.assert_called_once()
self.assertIn('Allowing migration from cell',
mock_debug.call_args[0][0])
@mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid',
return_value=objects.InstanceMapping(
cell_mapping=objects.CellMapping(uuid=uuids.cell1)))
@mock.patch('nova.conductor.tasks.migrate.LOG.debug')
def test_set_requested_destination_cell_allow_cross_cell_resize_false(
self, mock_debug, mock_get_im):
"""Tests the scenario that the RequestSpec is configured for
allow_cross_cell_resize=False.
"""
task = self._generate_task()
legacy_props = self.request_spec.to_legacy_filter_properties_dict()
# We don't have to explicitly set RequestSpec.requested_destination
# since _set_requested_destination_cell will do that and the
# Destination object will default allow_cross_cell_move to False.
task._set_requested_destination_cell(legacy_props)
mock_get_im.assert_called_once_with(self.context, self.instance.uuid)
mock_debug.assert_called_once()
self.assertIn('Restricting to cell', mock_debug.call_args[0][0])
def test_is_selected_host_in_source_cell_true(self):
"""Tests the scenario that the host Selection from the scheduler is in
the same cell as the instance.
"""
task = self._generate_task()
selection = objects.Selection(cell_uuid=self.context.cell_uuid)
self.assertTrue(task._is_selected_host_in_source_cell(selection))
def test_is_selected_host_in_source_cell_false(self):
"""Tests the scenario that the host Selection from the scheduler is
not in the same cell as the instance.
"""
task = self._generate_task()
selection = objects.Selection(cell_uuid=uuids.cell2, service_host='x')
self.assertFalse(task._is_selected_host_in_source_cell(selection))
class MigrationTaskAllocationUtils(test.NoDBTestCase):
@mock.patch('nova.objects.ComputeNode.get_by_host_and_nodename')


@@ -2792,6 +2792,9 @@ class ConductorTaskTestCase(_BaseTaskTestCase, test_compute.BaseTestCase):
inst_obj.availability_zone, project_id=inst_obj.project_id,
user_id=inst_obj.user_id)
@mock.patch.object(migrate.MigrationTask,
'_is_selected_host_in_source_cell',
return_value=True)
@mock.patch.object(objects.InstanceMapping, 'get_by_instance_uuid')
@mock.patch.object(scheduler_utils, 'setup_instance_group')
@mock.patch.object(objects.RequestSpec, 'from_components')
@@ -2805,7 +2808,7 @@ class ConductorTaskTestCase(_BaseTaskTestCase, test_compute.BaseTestCase):
def test_cold_migrate_exception_host_in_error_state_and_raise(
self, _preallocate_migration, prep_resize_mock, rollback_mock,
notify_mock, select_dest_mock, metadata_mock, spec_fc_mock,
sig_mock, im_mock):
sig_mock, im_mock, check_cell_mock):
inst_obj = objects.Instance(
image_ref='fake-image_ref',
vm_state=vm_states.STOPPED,