diff --git a/cinder/tests/unit/test_ibm_xiv_ds8k.py b/cinder/tests/unit/test_ibm_xiv_ds8k.py
index 66926195b04..947d2a60097 100644
--- a/cinder/tests/unit/test_ibm_xiv_ds8k.py
+++ b/cinder/tests/unit/test_ibm_xiv_ds8k.py
@@ -63,6 +63,12 @@
 REPLICATED_VOLUME = {'size': 64, 'name': REPLICA_FAKE, 'id': 2}
 
+REPLICATION_TARGETS = [{'target_device_id': 'fakedevice'}]
+SECONDARY = 'fakedevice'
+FAKE_FAILOVER_HOST = 'fakehost@fakebackend#fakepool'
+FAKE_PROVIDER_LOCATION = 'fake_provider_location'
+FAKE_DRIVER_DATA = 'fake_driver_data'
+
 CONTEXT = {}
 
 FAKESNAPSHOT = 'fakesnapshot'
 
@@ -281,6 +287,47 @@ class XIVDS8KFakeProxyDriver(object):
         return {'status': 'deleted'}, updated_snapshots
 
 
+    def get_replication_updates(self, context):
+
+        return []
+
+    def replication_disable(self, context, volume):
+        if volume['replication_status'] == 'invalid_status_val':
+            raise exception.CinderException()
+
+        volume_model_update = {}
+        volume_model_update['replication_driver_data'] = FAKE_DRIVER_DATA
+
+        return volume_model_update
+
+    def replication_enable(self, context, volume):
+        if volume['replication_status'] == 'invalid_status_val':
+            raise exception.CinderException()
+
+        volume_model_update = {}
+        volume_model_update['replication_driver_data'] = FAKE_DRIVER_DATA
+
+        return volume_model_update
+
+    def list_replication_targets(self, context, volume):
+        targets = []
+
+        for target in REPLICATION_TARGETS:
+            targets.append({'type': 'managed',
+                            'target_device_id': target['target_device_id']})
+
+        return {'volume_id': volume['id'], 'targets': targets}
+
+    def replication_failover(self, context, volume, secondary):
+        if volume['replication_status'] == 'invalid_status_val':
+            raise exception.CinderException()
+
+        model_update = {'host': FAKE_FAILOVER_HOST,
+                        'provider_location': FAKE_PROVIDER_LOCATION,
+                        'replication_driver_data': FAKE_DRIVER_DATA}
+
+        return model_update
+
 class XIVDS8KVolumeDriverTest(test.TestCase):
     """Test IBM XIV and DS8K volume driver."""
 
@@ -944,3 +991,122 @@ class XIVDS8KVolumeDriverTest(test.TestCase):
         self.assertEqual('available',
                          volumes_model_update['status'],
                          "volumes list status failed")
+
+    def test_get_replication_updates(self):
+        """Get replication updates."""
+
+        self.driver.do_setup(None)
+
+        ret = self.driver.get_replication_updates(CONTEXT)
+        self.assertEqual([], ret)
+
+    def test_replication_disable(self):
+        """Disable replication on the specified volume."""
+
+        self.driver.do_setup(None)
+
+        ctxt = context.get_admin_context()
+
+        replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
+        replicated_volume['replication_status'] = 'enabled'
+
+        volume_model_update = (
+            self.driver.replication_disable(ctxt, replicated_volume))
+
+        self.assertTrue('replication_driver_data' in volume_model_update)
+
+    def test_replication_disable_fail_on_cinder_exception(self):
+        """Test that replication_disable fails on driver raising exception."""
+
+        self.driver.do_setup(None)
+
+        replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
+        # on purpose - set invalid value to replication_status
+        # expect an exception.
+        replicated_volume['replication_status'] = 'invalid_status_val'
+        self.assertRaises(
+            exception.CinderException,
+            self.driver.replication_disable,
+            CONTEXT,
+            replicated_volume
+        )
+
+    def test_replication_enable(self):
+        """Enable replication on the specified volume."""
+
+        self.driver.do_setup(None)
+
+        ctxt = context.get_admin_context()
+
+        replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
+        replicated_volume['replication_status'] = 'disabled'
+
+        volume_model_update = (
+            self.driver.replication_enable(ctxt, replicated_volume))
+
+        self.assertTrue('replication_driver_data' in volume_model_update)
+
+    def test_replication_enable_fail_on_cinder_exception(self):
+        """Test that replication_enable fails on driver raising exception."""
+
+        self.driver.do_setup(None)
+
+        replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
+        # on purpose - set invalid value to replication_status
+        # expect an exception.
+        replicated_volume['replication_status'] = 'invalid_status_val'
+        self.assertRaises(
+            exception.CinderException,
+            self.driver.replication_enable,
+            CONTEXT,
+            replicated_volume
+        )
+
+    def test_list_replication_targets(self):
+        """Return the list of replication targets for a volume."""
+
+        self.driver.do_setup(None)
+
+        expected_resp = {'targets': [{'target_device_id': 'fakedevice',
+                                      'type': 'managed'}],
+                         'volume_id': VOLUME['id']}
+        targets = self.driver.list_replication_targets(CONTEXT, VOLUME)
+        self.assertEqual(expected_resp, targets)
+
+    def test_replication_failover(self):
+        """Test that replication_failover returns successfully. """
+
+        self.driver.do_setup(None)
+
+        replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
+        # assume the replication_status should be active
+        replicated_volume['replication_status'] = 'active'
+
+        expected_resp = {'host': FAKE_FAILOVER_HOST,
+                         'provider_location': FAKE_PROVIDER_LOCATION,
+                         'replication_driver_data': FAKE_DRIVER_DATA}
+
+        model_update = self.driver.replication_failover(
+            CONTEXT,
+            replicated_volume,
+            SECONDARY
+        )
+
+        self.assertEqual(expected_resp, model_update)
+
+    def test_replication_failover_fail_on_cinder_exception(self):
+        """Test that replication_failover fails on CinderException. """
+
+        self.driver.do_setup(None)
+
+        replicated_volume = copy.deepcopy(REPLICATED_VOLUME)
+        # on purpose - set invalid value to replication_status
+        # expect an exception.
+        replicated_volume['replication_status'] = 'invalid_status_val'
+        self.assertRaises(
+            exception.CinderException,
+            self.driver.replication_failover,
+            CONTEXT,
+            replicated_volume,
+            SECONDARY
+        )
diff --git a/cinder/volume/drivers/ibm/xiv_ds8k.py b/cinder/volume/drivers/ibm/xiv_ds8k.py
index 1c7ddce3dd3..daa04601f94 100644
--- a/cinder/volume/drivers/ibm/xiv_ds8k.py
+++ b/cinder/volume/drivers/ibm/xiv_ds8k.py
@@ -280,3 +280,29 @@ class XIVDS8KDriver(san.SanDriver,
         return self.xiv_ds8k_proxy.create_consistencygroup_from_src(
             context, group, volumes, cgsnapshot, snapshots, source_cg,
             source_vols)
+
+    def get_replication_updates(self, context):
+        """Get replication updates."""
+
+        return self.xiv_ds8k_proxy.get_replication_updates(context)
+
+    def replication_disable(self, context, volume):
+        """Disable replication on the specified volume."""
+
+        return self.xiv_ds8k_proxy.replication_disable(context, volume)
+
+    def replication_enable(self, context, volume):
+        """Enable replication on a replication capable volume."""
+
+        return self.xiv_ds8k_proxy.replication_enable(context, volume)
+
+    def list_replication_targets(self, context, volume):
+        """Provide a means to obtain replication targets for a volume."""
+
+        return self.xiv_ds8k_proxy.list_replication_targets(context, volume)
+
+    def replication_failover(self, context, volume, secondary):
+        """Force failover to a secondary replication target. """
+
+        return self.xiv_ds8k_proxy.replication_failover(
+            context, volume, secondary)
diff --git a/releasenotes/notes/xiv-ds8k-replication-v2-2a4dcc13401c7584.yaml b/releasenotes/notes/xiv-ds8k-replication-v2-2a4dcc13401c7584.yaml
new file mode 100644
index 00000000000..62e39213a92
--- /dev/null
+++ b/releasenotes/notes/xiv-ds8k-replication-v2-2a4dcc13401c7584.yaml
@@ -0,0 +1,3 @@
+---
+features:
+  - Adds replication V2 support to IBM XIV/DS8K driver.