diff --git a/manila/share/manager.py b/manila/share/manager.py
index f6f83aa862..a4f19b1f5c 100644
--- a/manila/share/manager.py
+++ b/manila/share/manager.py
@@ -1611,15 +1611,26 @@ class ShareManager(manager.SchedulerDependentManager):
 
         helper.apply_new_access_rules(dest_share_instance, share_ref['id'])
 
-        self.db.share_instance_update(
-            context, dest_share_instance['id'],
-            {'status': constants.STATUS_AVAILABLE, 'progress': '100%'})
-
-        self.db.share_instance_update(context, src_share_instance['id'],
-                                      {'status': constants.STATUS_INACTIVE})
+        self._migration_complete_instance(context, share_ref,
+                                          src_share_instance['id'],
+                                          dest_share_instance['id'])
 
         self._migration_delete_instance(context, src_share_instance['id'])
 
+    def _migration_complete_instance(self, context, share_ref,
+                                     src_instance_id, dest_instance_id):
+        dest_updates = {
+            'status': constants.STATUS_AVAILABLE,
+            'progress': '100%'
+        }
+        if share_ref.get('replication_type'):
+            dest_updates['replica_state'] = constants.REPLICA_STATE_ACTIVE
+
+        self.db.share_instance_update(context, dest_instance_id, dest_updates)
+
+        self.db.share_instance_update(context, src_instance_id,
+                                      {'status': constants.STATUS_INACTIVE})
+
     def _migration_delete_instance(self, context, instance_id):
 
         # refresh the share instance model
@@ -1790,12 +1801,9 @@ class ShareManager(manager.SchedulerDependentManager):
 
             raise exception.ShareMigrationFailed(reason=msg)
 
-        self.db.share_instance_update(
-            context, dest_share_instance['id'],
-            {'status': constants.STATUS_AVAILABLE, 'progress': '100%'})
-
-        self.db.share_instance_update(context, src_instance_id,
-                                      {'status': constants.STATUS_INACTIVE})
+        self._migration_complete_instance(context, share_ref,
+                                          src_share_instance['id'],
+                                          dest_share_instance['id'])
 
         helper.delete_instance_and_wait(src_share_instance)
 
diff --git a/manila/tests/share/test_manager.py b/manila/tests/share/test_manager.py
index 81cf8484b5..beddd16037 100644
--- a/manila/tests/share/test_manager.py
+++ b/manila/tests/share/test_manager.py
@@ -5880,6 +5880,7 @@ class ShareManagerTestCase(test.TestCase):
                          mock.Mock(return_value=True))
         self.mock_object(self.share_manager.db, 'share_instance_update')
         self.mock_object(self.share_manager.db, 'share_update')
+        self.mock_object(self.share_manager, '_migration_complete_instance')
         self.mock_object(self.share_manager, '_migration_delete_instance')
         self.mock_object(migration_api.ShareMigrationHelper,
                          'apply_new_access_rules')
@@ -5910,14 +5911,11 @@ class ShareManagerTestCase(test.TestCase):
             snapshot_mappings, src_server, dest_server)
         (migration_api.ShareMigrationHelper.apply_new_access_rules.
             assert_called_once_with(dest_instance, share['id']))
+        (self.share_manager._migration_complete_instance.
+            assert_called_once_with(self.context, share,
+                                    src_instance['id'], dest_instance['id']))
         self.share_manager._migration_delete_instance.assert_called_once_with(
             self.context, src_instance['id'])
-        self.share_manager.db.share_instance_update.assert_has_calls([
-            mock.call(self.context, dest_instance['id'],
-                      {'status': constants.STATUS_AVAILABLE,
-                       'progress': '100%'}),
-            mock.call(self.context, src_instance['id'],
-                      {'status': constants.STATUS_INACTIVE})])
         self.share_manager.db.share_update.assert_called_once_with(
             self.context, dest_instance['share_id'],
             {'task_state': constants.TASK_STATE_MIGRATION_COMPLETING})
@@ -5960,6 +5958,7 @@ class ShareManagerTestCase(test.TestCase):
                          mock.Mock(side_effect=[instance, new_instance]))
         self.mock_object(self.share_manager.db, 'share_instance_update')
         self.mock_object(self.share_manager.db, 'share_update')
+        self.mock_object(self.share_manager, '_migration_complete_instance')
         delete_mock = self.mock_object(migration_api.ShareMigrationHelper,
                                        'delete_instance_and_wait')
         self.mock_object(migration_api.ShareMigrationHelper,
@@ -5975,20 +5974,15 @@ class ShareManagerTestCase(test.TestCase):
             mock.call(self.context, new_instance['id'], with_share_data=True)
         ])
 
-        self.share_manager.db.share_instance_update.assert_has_calls([
-            mock.call(self.context, new_instance['id'],
-                      {'status': constants.STATUS_AVAILABLE,
-                       'progress': '100%'}),
-            mock.call(self.context, instance['id'],
-                      {'status': constants.STATUS_INACTIVE})
-        ])
-
         self.share_manager.db.share_update.assert_called_once_with(
             self.context, share['id'],
             {'task_state': constants.TASK_STATE_MIGRATION_COMPLETING})
         (migration_api.ShareMigrationHelper.apply_new_access_rules.
             assert_called_once_with(new_instance, 'fake_id'))
         delete_mock.assert_called_once_with(instance)
+        (self.share_manager._migration_complete_instance.
+            assert_called_once_with(self.context, share, instance['id'],
+                                    new_instance['id']))
 
     @ddt.data(constants.TASK_STATE_MIGRATION_DRIVER_IN_PROGRESS,
               constants.TASK_STATE_MIGRATION_DRIVER_PHASE1_DONE,
@@ -6156,6 +6150,31 @@ class ShareManagerTestCase(test.TestCase):
         (self.share_manager.db.share_snapshot_instance_delete.
            assert_called_once_with(self.context, snapshot.instance['id']))
 
+    @ddt.data({}, {'replication_type': 'readable'})
+    def test__migration_complete_instance(self, kwargs):
+        src_share = db_utils.create_share()
+        dest_share = db_utils.create_share(**kwargs)
+        src_instance_id = src_share['instance']['id']
+        dest_instance_id = dest_share['instance']['id']
+        src_updates = {'status': constants.STATUS_INACTIVE}
+        dest_updates = {
+            'status': constants.STATUS_AVAILABLE,
+            'progress': '100%'
+        }
+        if kwargs.get('replication_type'):
+            replication_info = {
+                'replica_state': constants.REPLICA_STATE_ACTIVE}
+            dest_updates.update(replication_info)
+
+        self.mock_object(self.share_manager.db, 'share_instance_update')
+
+        self.share_manager._migration_complete_instance(
+            self.context, dest_share, src_instance_id, dest_instance_id)
+
+        self.share_manager.db.share_instance_update.assert_has_calls(
+            [mock.call(self.context, dest_instance_id, dest_updates),
+             mock.call(self.context, src_instance_id, src_updates)])
+
     def test_migration_cancel_invalid(self):
 
         share = db_utils.create_share()
diff --git a/releasenotes/notes/bug-1927060-fix-replica-state-on-migration-complete-4fb4d8ba59b58505.yaml b/releasenotes/notes/bug-1927060-fix-replica-state-on-migration-complete-4fb4d8ba59b58505.yaml
new file mode 100644
index 0000000000..11a9847a12
--- /dev/null
+++ b/releasenotes/notes/bug-1927060-fix-replica-state-on-migration-complete-4fb4d8ba59b58505.yaml
@@ -0,0 +1,9 @@
+---
+fixes:
+  - |
+    Fixed an issue where shares with replication support could finish a
+    migration without any share instance having its `replica_state` set to
+    active. Now, when the share supports replication, the destination share
+    instance has its replica state set to active as soon as the migration
+    completes. For more details, please refer to
+    `bug 1927060 <https://launchpad.net/bugs/1927060>`_.
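
Reviewer note: the snippet below is a standalone sketch (not Manila code) that paraphrases the update payloads the new _migration_complete_instance helper writes, per the hunks above; the literal constant values are assumptions mirroring manila.common.constants.

# Standalone illustration of the completion updates introduced by this patch.
# Assumed constant values (mirroring manila.common.constants).
STATUS_AVAILABLE = 'available'
STATUS_INACTIVE = 'inactive'
REPLICA_STATE_ACTIVE = 'active'


def completion_updates(share_ref):
    """Return (dest_updates, src_updates) written when a migration completes."""
    dest_updates = {'status': STATUS_AVAILABLE, 'progress': '100%'}
    # The fix: for replicated shares, the destination instance is also
    # promoted to an active replica, so the share is never left without one.
    if share_ref.get('replication_type'):
        dest_updates['replica_state'] = REPLICA_STATE_ACTIVE
    return dest_updates, {'status': STATUS_INACTIVE}


print(completion_updates({'replication_type': 'readable'})[0])
# {'status': 'available', 'progress': '100%', 'replica_state': 'active'}
print(completion_updates({})[0])
# {'status': 'available', 'progress': '100%'}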