
Merge "Add sandstone iscsi driver."

tags/16.0.0.0rc1
Zuul (2 months ago), committed by Gerrit Code Review
parent commit 6221ff7887
9 changed files with 2317 additions and 0 deletions
  1. cinder/opts.py (+3, -0)
  2. cinder/tests/unit/volume/drivers/sandstone/__init__.py (+0, -0)
  3. cinder/tests/unit/volume/drivers/sandstone/test_sds_client.py (+560, -0)
  4. cinder/tests/unit/volume/drivers/sandstone/test_sds_driver.py (+455, -0)
  5. cinder/tests/unit/volume/drivers/sandstone/test_utils.py (+54, -0)
  6. cinder/volume/drivers/sandstone/__init__.py (+0, -0)
  7. cinder/volume/drivers/sandstone/constants.py (+21, -0)
  8. cinder/volume/drivers/sandstone/sds_client.py (+711, -0)
  9. cinder/volume/drivers/sandstone/sds_driver.py (+513, -0)

cinder/opts.py (+3, -0)

@@ -141,6 +141,8 @@ from cinder.volume.drivers import remotefs as cinder_volume_drivers_remotefs
from cinder.volume.drivers.san.hp import hpmsa_common as \
cinder_volume_drivers_san_hp_hpmsacommon
from cinder.volume.drivers.san import san as cinder_volume_drivers_san_san
from cinder.volume.drivers.sandstone import sds_driver as \
cinder_volume_drivers_sandstone_sdsdriver
from cinder.volume.drivers import solidfire as cinder_volume_drivers_solidfire
from cinder.volume.drivers import storpool as cinder_volume_drivers_storpool
from cinder.volume.drivers.stx import common as \
@@ -258,6 +260,7 @@ def list_opts():
instorage_mcs_opts,
cinder_volume_drivers_inspur_instorage_instorageiscsi.
instorage_mcs_iscsi_opts,
cinder_volume_drivers_sandstone_sdsdriver.sds_opts,
cinder_volume_drivers_veritas_access_veritasiscsi.VA_VOL_OPTS,
cinder_volume_manager.volume_manager_opts,
cinder_wsgi_eventletserver.socket_opts,
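
A note on this hunk: cinder/opts.py only aggregates per-driver option lists so that oslo-config-generator can emit a complete sample cinder.conf. Below is a minimal sketch of that oslo.config pattern; the option and group names are illustrative, not the driver's actual sds_opts.

from oslo_config import cfg

# Illustrative option only -- the real SandStone options are defined as
# sds_opts in cinder/volume/drivers/sandstone/sds_driver.py.
example_opts = [
    cfg.StrOpt('sandstone_example_opt',
               help='Placeholder option for this sketch.'),
]


def list_opts():
    # list_opts() returns (group, opts) pairs gathered from every driver
    # module, which is why adding sds_opts to the chain above is enough for
    # the config generator to pick the new options up.
    return [('backend_defaults', example_opts)]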


cinder/tests/unit/volume/drivers/sandstone/__init__.py (+0, -0)


cinder/tests/unit/volume/drivers/sandstone/test_sds_client.py (+560, -0)

@@ -0,0 +1,560 @@
# Copyright (c) 2019 SandStone Data Technologies Co., Ltd
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unittest for sds_client."""

import json
from unittest import mock

import requests

from cinder import test
from cinder.tests.unit.volume.drivers.sandstone import test_utils
from cinder.volume.drivers.sandstone import sds_client


class FakeSession(test_utils.FakeBaseSession):
"""Fake request session."""

method_map = {
'post': {
'capacity': {'data': {'capacity_bytes': 1024, 'free_bytes': 1024}},
'pool/list': {'data': [{'status': {'progress': 100},
'pool_name': 'fake_pool',
'realname': 'fake_pool',
'storage_policy': 'fake_replicate',
'domain_name': 'fake_domain',
'pool_id': 3,
'policy_type': 'replicated',
'size': 2}]},
'resource/initiator/list': {'data': {
'results': [{'iqn': 'fake_iqn',
'type': 'iscsi'}]}},
'resource/target/get_target_acl_list': {'data': {
'results': [{'autodiscovery': 'yes',
'name': 'fake_iqn',
'approved': 'yes',
'manual': 'no',
'ip': ''}]}},
'block/gateway/server/list': {'data': [{
'networks': [{'hostid': 'node0001',
'address': '1.1.1.1',
'type': 'iSCSI'}]}]},
'resource/target/list': {'data': {
'results': [{'status': 'fake_state',
'node': ['node0001'],
'name': 'fake_target',
'type': 'iSCSI',
'gateway': [{
'hostid': 'node0001',
'networks': [{
'hostid': 'node0001',
'type': 'iSCSI',
'address': 'fake_address'}],
'hostip': 'fake_hostip'}]}]}},
'resource/target/get_chap_list': {'data': [{
'user': 'fake_chapuser',
'level': 'level1'}]},
'resource/target/get_luns': {'data': {
'results': [{'lid': 1,
'name': 'fake_lun',
'pool_id': 1}]}},
'resource/lun/list': {'data': {
'results': [{'volumeName': 'fake_lun',
'pool_id': 1,
'capacity_bytes': 1024}]}},
'delaytask/list': {'data': {
'results': [{'status': 'completed',
'run_status': 'completed',
'executor': 'LunFlatten',
'progress': 100,
'parameter': {'pool_id': 1,
'lun_name': 'fake_lun'}}]}},
'resource/snapshot/list': {'data': {
'results': [{'snapName': 'fake_snapshot',
'lunName': 'fake_lun'}]}},
}
}


class TestSdsclient(test.TestCase):
"""Testcase sds client."""

def setUp(self):
"""Setup."""
super(TestSdsclient, self).setUp()
self.mock_object(requests, 'Session', FakeSession)
self.client = sds_client.RestCmd('192.168.200.100',
'fake_user',
'fake_password',
True)
self.client.login()

def test_login(self):
"""Test login and check headers."""
self.assertEqual('https://192.168.200.100',
self.client.session.headers['Referer'])
self.assertEqual('fake_token',
self.client.session.headers['X-XSRF-Token'])
self.assertEqual('XSRF-TOKEN=fake_token; username=fake_user; '
'sdsom_sessionid=fake_session',
self.client.session.headers['Cookie'])

def test_logout(self):
"""Test logout."""
retval = self.client.logout()
self.assertIsNone(retval)

def test_query_capacity_info(self):
"""Test query cluster capacity."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_capacity_info()
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'capacity')
self.assertDictEqual({'capacity_bytes': 1024, 'free_bytes': 1024},
retval)

def test_query_pool_info(self):
"""Test query pool status."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_pool_info()
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'pool/list')
self.assertListEqual([{'status': {'progress': 100},
'realname': 'fake_pool',
'pool_name': 'fake_pool',
'storage_policy': 'fake_replicate',
'domain_name': 'fake_domain',
'pool_id': 3,
'policy_type': 'replicated',
'size': 2}], retval)

def test_create_initiator(self):
"""Test create initiator."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.create_initiator(
initiator_name='fake_iqn')
data = json.dumps(
{'iqn': 'fake_iqn', 'type': 'iSCSI',
'remark': 'Cinder iSCSI'})
mocker.assert_called_with(
'https://192.168.200.100/api/storage/'
'resource/initiator/create', data=data)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd,
"_judge_delaytask_status")
def test_add_initiator_to_target(self,
mock__judge_delaytask_status):
"""Test add initiator to target."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
mock__judge_delaytask_status.return_value = None
retval = self.client.add_initiator_to_target(
target_name='fake_target',
initiator_name='fake_iqn')
data = json.dumps(
{'targetName': 'fake_target',
'iqns': [{'ip': '', 'iqn': 'fake_iqn'}]})
mocker.assert_called_with(
'https://192.168.200.100/api/storage/'
'resource/target/add_initiator_to_target', data=data)
self.assertIsNone(retval)

def test_query_initiator_by_name(self):
"""Test query initiator exist or not."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_initiator_by_name(
initiator_name='fake_iqn')
data = json.dumps(
{'initiatorMark': '', 'pageno': 1,
'pagesize': 1000, 'type': 'iSCSI'})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/initiator/list', data=data)
self.assertDictEqual({'iqn': 'fake_iqn',
'type': 'iscsi'}, retval)

def test_query_target_initiatoracl(self):
"""Test query target related initiator info."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_target_initiatoracl(
target_name='fake_target',
initiator_name='fake_iqn')
data = json.dumps(
{'pageno': 1, 'pagesize': 1000,
'targetName': 'fake_target'})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/target/get_target_acl_list', data=data)
self.assertListEqual([{'autodiscovery': 'yes',
'name': 'fake_iqn',
'approved': 'yes',
'manual': 'no',
'ip': ''}], retval)

def test_query_node_by_targetips(self):
"""Test query node id and node ip, relation dict."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_node_by_targetips(
target_ips=['1.1.1.1'])
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'block/gateway/server/list')
self.assertDictEqual({'1.1.1.1': 'node0001'}, retval)

def test_query_target_by_name(self):
"""Test query target exist or not."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_target_by_name(
target_name='fake_target')
data = json.dumps(
{'pageno': 1, 'pagesize': 1000,
"thirdParty": [0, 1],
"targetMark": ""})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/target/list', data=data)
self.assertDictEqual({
'status': 'fake_state',
'node': ['node0001'],
'name': 'fake_target',
'type': 'iSCSI',
'gateway': [{'hostid': 'node0001',
'networks': [{'hostid': 'node0001',
'type': 'iSCSI',
'address': 'fake_address'}],
'hostip': 'fake_hostip'}]}, retval)

def test_create_target(self):
"""Test create target."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.create_target(target_name='fake_target',
targetip_to_hostid=
{'1.1.1.1': 'node0001',
'1.1.1.2': 'node0002',
'1.1.1.3': 'node0003'})
tip_to_hid = {'1.1.1.1': 'node0001',
'1.1.1.2': 'node0002',
'1.1.1.3': 'node0003'}
data = json.dumps(
{"type": "iSCSI", "readOnly": 0,
"thirdParty": 1, "targetName": "fake_target",
"networks": [{"hostid": host_id, "address": address}
for address, host_id
in tip_to_hid.items()]})
mocker.assert_called_with(
'https://192.168.200.100/api/storage/'
'resource/target/create', data=data)
self.assertIsNone(retval)

def test_add_chap_by_target(self):
"""Test add chap to target."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.add_chap_by_target(
target_name='fake_target',
username='fake_chapuser',
password='fake_chappassword')
data = json.dumps(
{"password": "fake_chappassword",
"user": "fake_chapuser", "targetName": "fake_target"})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/target/add_chap', data=data)
self.assertIsNone(retval)

def test_query_chapinfo_by_target(self):
"""Test query target chap info."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_chapinfo_by_target(
target_name='fake_target',
username='fake_chapuser')
data = json.dumps({"targetName": "fake_target"})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/target/get_chap_list', data=data)
self.assertDictEqual({'user': 'fake_chapuser',
'level': 'level1'}, retval)

def test_create_lun(self):
"""Test create lun."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.create_lun(capacity_bytes=1024,
poolid=1,
volume_name='fake_lun')
data = json.dumps({"capacity_bytes": 1024,
"poolId": 1, "priority": "normal",
"qosSettings": {}, "volumeName": 'fake_lun'})
mocker.assert_called_with(
'https://192.168.200.100/api/storage/'
'resource/lun/add', data=data)
self.assertIsNone(retval)

def test_delete_lun(self):
"""Test delete lun."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.delete_lun(poolid=1,
volume_name='fake_lun')
data = json.dumps({"delayTime": 0, "volumeNameList": [{
"poolId": 1,
"volumeName": "fake_lun"}]})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/lun/batch_delete', data=data)
self.assertIsNone(retval)

def test_extend_lun(self):
"""Test resize lun."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.extend_lun(capacity_bytes=2048,
poolid=1,
volume_name='fake_lun')
data = json.dumps({"capacity_bytes": 2048,
"poolId": 1,
"volumeName": 'fake_lun'})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/lun/resize', data=data)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, "_judge_delaytask_status")
@mock.patch.object(sds_client.RestCmd, "query_lun_by_name")
def test_unmap_lun(self, mock_query_lun_by_name,
mock__judge_delaytask_status):
"""Test unmap lun from target."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
mock__judge_delaytask_status.return_value = None
lun_uuid = "c5c8533c-4ce0-11ea-bc01-005056a736f8"
mock_query_lun_by_name.return_value = {'uuid': lun_uuid}
retval = self.client.unmap_lun(target_name='fake_target',
poolid=1,
volume_name='fake_lun',
pool_name='fake_pool')
data = json.dumps({"targetName": "fake_target",
"targetLunList": [lun_uuid],
"targetSnapList": []})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/target/unmap_luns', data=data)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, "_judge_delaytask_status")
@mock.patch.object(sds_client.RestCmd, "query_lun_by_name")
def test_mapping_lun(self, mock_query_lun_by_name,
mock__judge_delaytask_status):
"""Test map lun to target."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
mock__judge_delaytask_status.return_value = None
lun_uuid = "c5c8533c-4ce0-11ea-bc01-005056a736f8"
mock_query_lun_by_name.return_value = {'uuid': lun_uuid}
retval = self.client.mapping_lun(
target_name='fake_target',
poolid=1,
volume_name='fake_lun',
pool_name='fake_pool')
data = json.dumps(
{"targetName": 'fake_target',
"targetLunList": [lun_uuid],
"targetSnapList": []})
mocker.assert_called_with(
'https://192.168.200.100/api/storage/'
'resource/target/map_luns', data=data)
self.assertIsNone(retval)

def test_query_target_lunacl(self):
"""Test query target related lun info."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_target_lunacl(target_name='fake_target',
poolid=1,
volume_name='fake_lun')
data = json.dumps({"pageno": 1, "pagesize": 1000,
"pools": [1],
"targetName": "fake_target"})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/target/get_luns', data=data)
self.assertEqual(1, retval)

def test_query_lun_by_name(self):
"""Test query lun exist or not."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_lun_by_name(
volume_name='fake_lun',
poolid=1)
data = json.dumps(
{"pageno": 1, "pagesize": 1000, "volumeMark": "fake_lun",
"sortType": "time", "sortOrder": "desc",
"pools": [1],
"thirdParty": [0, 1]})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/lun/list', data=data)
self.assertDictEqual({'volumeName': 'fake_lun',
'pool_id': 1,
'capacity_bytes': 1024}, retval)

@mock.patch.object(sds_client.RestCmd, "_judge_delaytask_status")
def test_create_snapshot(self, mock__judge_delaytask_status):
"""Test create snapshot."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
mock__judge_delaytask_status.return_value = None
retval = self.client.create_snapshot(poolid=1,
volume_name='fake_lun',
snapshot_name='fake_snapshot')
data = json.dumps(
{"lunName": "fake_lun",
"poolId": 1,
"remark": "Cinder iSCSI snapshot.",
"snapName": "fake_snapshot"})
mocker.assert_called_with(
'https://192.168.200.100/api/storage/'
'resource/snapshot/add', data=data)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, "_judge_delaytask_status")
def test_delete_snapshot(self, mock__judge_delaytask_status):
"""Test delete snapshot."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
mock__judge_delaytask_status.return_value = None
retval = self.client.delete_snapshot(poolid=1,
volume_name='fake_lun',
snapshot_name='fake_snapshot')
data = json.dumps(
{"lunName": "fake_lun", "poolId": 1,
"snapName": "fake_snapshot"})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/snapshot/delete', data=data)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, "flatten_lun")
@mock.patch.object(sds_client.RestCmd, "_judge_delaytask_status")
def test_create_lun_from_snapshot(self, mock__judge_delaytask_status,
mock_flatten_lun):
"""Test create lun from snapshot."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
mock__judge_delaytask_status.return_value = None
mock_flatten_lun.return_value = None
retval = self.client.create_lun_from_snapshot(
snapshot_name='fake_snapshot',
src_volume_name='fake_src_lun',
poolid=1,
dst_volume_name='fake_dst_lun')
data = json.dumps(
{"snapshot": {"poolId": 1,
"lunName": "fake_src_lun",
"snapName": "fake_snapshot"},
"cloneLun": {"lunName": "fake_dst_lun",
"poolId": 1}})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/snapshot/clone', data=data)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, "_judge_delaytask_status")
def test_flatten_lun(self, mock__judge_delaytask_status):
"""Test flatten lun."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
mock__judge_delaytask_status.return_value = None
retval = self.client.flatten_lun(volume_name='fake_lun',
poolid=1)
data = json.dumps(
{"poolId": 1,
"volumeName": "fake_lun"})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/lun/flatten', data=data)
self.assertIsNone(retval)

def test_query_flatten_lun_process(self):
"""Test query flatten process."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_flatten_lun_process(
poolid=1,
volume_name='fake_lun')
data = json.dumps({"pageno": 1, "pagesize": 20})
mocker.assert_called_once_with(
'https://192.168.200.100/api/om/'
'delaytask/list', data=data)
self.assertDictEqual({'status': 'completed',
'run_status': 'completed',
'executor': 'LunFlatten',
'progress': 100,
'parameter': {'pool_id': 1,
'lun_name': 'fake_lun'}},
retval)

@mock.patch.object(sds_client.RestCmd, "create_snapshot")
@mock.patch.object(sds_client.RestCmd, "create_lun_from_snapshot")
@mock.patch.object(sds_client.RestCmd, "flatten_lun")
@mock.patch.object(sds_client.RestCmd, "delete_snapshot")
def test_create_lun_from_lun(self, mock_delete_snapshot,
mock_flatten_lun,
mock_create_lun_from_snapshot,
mock_create_snapshot):
"""Test create clone lun."""
self.client = sds_client.RestCmd(
"https://192.168.200.100",
"fake_user", "fake_password", True)
mock_create_snapshot.return_value = {'success': 1}
mock_create_lun_from_snapshot.return_value = {'success': 1}
mock_flatten_lun.return_value = {'success': 1}
mock_delete_snapshot.return_value = {'success': 1}
retval = self.client.create_lun_from_lun(
dst_volume_name='fake_dst_lun',
poolid=1,
src_volume_name='fake_src_lun')
self.assertIsNone(retval)

def test_query_snapshot_by_name(self):
"""Test query snapshot exist or not."""
with mock.patch.object(self.client.session, 'post',
wraps=self.client.session.post) as mocker:
retval = self.client.query_snapshot_by_name(
volume_name='fake_lun',
poolid=1,
snapshot_name='fake_snapshot')
data = json.dumps(
{"lunName": "fake_lun", "pageno": 1,
"pagesize": 1000, "poolId": 1,
"snapMark": ""})
mocker.assert_called_once_with(
'https://192.168.200.100/api/storage/'
'resource/snapshot/list', data=data)
self.assertListEqual([{'snapName': 'fake_snapshot',
'lunName': 'fake_lun'}], retval)
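
The assertions above all use the same wrap-and-inspect pattern; a distilled sketch, assuming a RestCmd wired to FakeSession as built in TestSdsclient.setUp:

from unittest import mock


def assert_pool_list_called(client):
    # wraps= keeps FakeSession's canned response flowing while the mock
    # records the exact URL (and body, when present) the client sent.
    with mock.patch.object(client.session, 'post',
                           wraps=client.session.post) as mocker:
        client.query_pool_info()
        mocker.assert_called_once_with(
            'https://192.168.200.100/api/storage/pool/list')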

cinder/tests/unit/volume/drivers/sandstone/test_sds_driver.py (+455, -0)

@@ -0,0 +1,455 @@
# Copyright (c) 2019 SandStone Data Technologies Co., Ltd
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unittest for sds_client."""
from unittest import mock
import uuid

import ddt
from oslo_utils import units

from cinder import exception
from cinder import objects
from cinder import test
from cinder.volume import configuration as config
from cinder.volume.drivers.san import san
from cinder.volume.drivers.sandstone import sds_client
from cinder.volume.drivers.sandstone import sds_driver


class FakeSdsBaseDriver(sds_driver.SdsBaseDriver):
"""Fake sds base driver."""

def __init__(self):
"""Init conf client pool sds_client."""
self.configuration = config.Configuration(None)
self.configuration.append_config_values(sds_driver.sds_opts)
self.configuration.append_config_values(san.san_opts)
self.configuration.suppress_requests_ssl_warnings = True
self.client = None
self.poolid = 1
self.VERSION = '1.0'
self.address = "192.168.200.100"
self.user = "fake_user"
self.password = "fake_password"
self.pool = "fake_pool_name"
self.iscsi_info = {"iqn.1994-05.com.redhat:899c5f9d15d":
"1.1.1.1,1.1.1.2,1.1.1.3"}
self.default_target_ips = ["1.1.1.1", "1.1.1.2", "1.1.1.3"]
self.default_chap_info = "1234567891234,123456789123"


@ddt.ddt
class TestSdsBaseDriver(test.TestCase):
"""Testcase sds base driver."""

def setUp(self):
"""Setup."""
super(TestSdsBaseDriver, self).setUp()
self.fake_driver = FakeSdsBaseDriver()
self.fake_driver.client = sds_client.RestCmd('192.168.200.100',
'fake_user',
'fake_password',
True)

# @mock.patch.object(sds_client.RestCmd, 'login')
def test_do_setup(self):
"""Do setup."""
self.fake_driver.client = sds_client.RestCmd(
'fake_rest_ip', 'user', 'password', True)
self.fake_driver.configuration.san_ip = 'fake_rest_ip'
self.fake_driver.configuration.san_login = 'fake_san_user'
self.fake_driver.configuration.san_password = 'fake_san_password'
self.fake_driver.do_setup('context')

@mock.patch.object(sds_client.RestCmd, 'query_pool_info')
@mock.patch.object(sds_client.RestCmd, 'get_poolid_from_poolname')
@mock.patch.object(sds_client.RestCmd, 'login')
def test_check_for_setup_error(self, mock_login,
mock_get_poolid_from_poolname,
mock_query_pool_info):
"""Test pool status health or not."""
result1 = [
{'status': {'progress': 33, 'state': ['degraded'], 'flags': 4},
'pool_name': 'fake_pool_name', 'used': 1792950890,
'display_name': 'data', 'replicated_size': 2,
'storage_policy': '2', 'domain_name': 'sandstone',
'pool_id': 3, 'min_size': 1, 'erasure_code_profile': '',
'policy_type': 'replicated', 'rule_id': 1,
'size': 2},
{'status': {'progress': 33, 'state': ['degraded'], 'flags': 4},
'pool_name': 'vms1', 'used': 1792950890,
'display_name': 'data', 'replicated_size': 2,
'storage_policy': '2', 'domain_name': 'sandstone',
'pool_id': 3, 'min_size': 1, 'erasure_code_profile': '',
'policy_type': 'replicated', 'rule_id': 1,
'size': 2}]
result2 = [
{'status': {'progress': 33, 'state': ['degraded'], 'flags': 4},
'pool_name': 'vms', 'used': 1792950890,
'display_name': 'data', 'replicated_size': 2,
'storage_policy': '2', 'domain_name': 'sandstone',
'pool_id': 3, 'min_size': 1, 'erasure_code_profile': '',
'policy_type': 'replicated', 'rule_id': 1,
'size': 2},
{'status': {'progress': 33, 'state': ['degraded'], 'flags': 4},
'pool_name': 'vms1', 'used': 1792950890,
'display_name': 'data', 'replicated_size': 2,
'storage_policy': '2', 'domain_name': 'sandstone',
'pool_id': 3, 'min_size': 1, 'erasure_code_profile': '',
'policy_type': 'replicated', 'rule_id': 1,
'size': 2}]

mock_login.return_value = {"success": 1}
mock_get_poolid_from_poolname.return_value = (
{"fake_pool_name": 3})
mock_query_pool_info.return_value = result1
retval = self.fake_driver.check_for_setup_error()
self.assertIsNone(retval)
mock_query_pool_info.return_value = result2
try:
self.fake_driver.check_for_setup_error()
except Exception as e:
self.assertEqual(exception.InvalidInput, type(e))

@mock.patch.object(sds_client.RestCmd, 'query_capacity_info')
def test__update_volume_stats(self, mock_query_capacity_info):
"""Get cluster capacity."""
result1 = {
"capacity_bytes": 2 * units.Gi,
"free_bytes": units.Gi
}
mock_query_capacity_info.return_value = result1
retval = self.fake_driver._update_volume_stats(
pool_name="fake_pool_name")
self.assertDictEqual(
{"pools": [dict(
pool_name="fake_pool_name",
vendor_name='SandStone USP',
driver_version=self.fake_driver.VERSION,
total_capacity_gb=2.0,
free_capacity_gb=1.0,
QoS_support=True,
thin_provisioning_support=True,
multiattach=False,)
]}, retval)
mock_query_capacity_info.assert_called_once_with()

@mock.patch.object(sds_driver.SdsBaseDriver, 'get_volume_stats')
def test_get_volume_stats(self, mock_get_volume_stats):
"""Get cluster capacitys."""
result1 = {"pool": dict(
pool_name="fake_pool_name",
total_capacity_gb=2.0,
free_capacity_gb=1.0,
QoS_support=True,
thin_provisioning_support=True,
multiattach=False,)}
mock_get_volume_stats.return_value = result1
retval = self.fake_driver.get_volume_stats()
self.assertDictEqual(
{"pool": dict(
pool_name="fake_pool_name",
total_capacity_gb=2.0,
free_capacity_gb=1.0,
QoS_support=True,
thin_provisioning_support=True,
multiattach=False,
)}, retval)

@mock.patch.object(sds_client.RestCmd, 'create_lun')
def test_create_volume(self, mock_create_lun):
"""Test create volume."""
volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
mock_create_lun.return_value = {'success': 1}
retval = self.fake_driver.create_volume(volume=volume)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, 'delete_lun')
def test_delete_volume(self, mock_delete_):
"""Test delete volume."""
mock_delete_.return_value = {'success': 1}
volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
retval = self.fake_driver.delete_volume(volume)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, 'extend_lun')
@mock.patch.object(sds_client.RestCmd, 'create_lun_from_snapshot')
def test_create_volume_from_snapshot(self, mock_lun_from_snapshot,
mock_extend_lun):
"""Test create new volume from snapshot of src volume."""
volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
snapshot = objects.Snapshot(
id=uuid.uuid4(), volume_size=2, volume=volume)
mock_lun_from_snapshot.return_value = {'success': 1}
mock_extend_lun.return_value = {'success': 1}
retval = self.fake_driver.create_volume_from_snapshot(volume, snapshot)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, 'extend_lun')
@mock.patch.object(sds_client.RestCmd, 'create_lun_from_lun')
@mock.patch.object(sds_driver.SdsBaseDriver, '_check_volume_exist')
def test_create_cloned_volume(self, mock__check_volume_exist,
mock_create_lun_from_lun,
mock_extend_lun):
"""Test create clone volume."""
mock__check_volume_exist.return_value = True
mock_create_lun_from_lun.return_value = {'success': 1}
mock_extend_lun.return_value = {'success': 1}
dst_volume = objects.Volume(_name_id=uuid.uuid4(), size=2)
src_volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
retval = self.fake_driver.create_cloned_volume(dst_volume, src_volume)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, 'query_lun_by_name')
def test__check_volume_exist(self, mock_query_lun_by_name):
"""Test volume exist or not."""
mock_query_lun_by_name.return_value = {'success': 1}
volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
retval = self.fake_driver._check_volume_exist(volume)
self.assertEqual({'success': 1}, retval)

@mock.patch.object(sds_client.RestCmd, 'extend_lun')
@mock.patch.object(sds_driver.SdsBaseDriver, '_check_volume_exist')
def test_extend_volume(self, mock__check_volume_exist, mock_extend_lun):
"""Test resize volume."""
volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
new_size = 3
mock__check_volume_exist.return_value = {
'capacity_bytes': units.Gi * 1}
mock_extend_lun.return_value = {'success': 1}
retval = self.fake_driver.extend_volume(volume, new_size)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, 'create_snapshot')
def test_create_snapshot(self, mock_create_snapshot):
"""Test create snapshot of volume."""
volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
snapshot = objects.Snapshot(
id=uuid.uuid4(), volume_size=2, volume=volume)
mock_create_snapshot.return_value = {'success': 1}
retval = self.fake_driver.create_snapshot(snapshot)
self.assertIsNone(retval)

@mock.patch.object(sds_client.RestCmd, 'query_snapshot_by_name')
def test__check_snapshot_exist(self, mock_query_snapshot_by_name):
"""Test snapshot exist or not."""
volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
snapshot = objects.Snapshot(
id=uuid.uuid4(), volume_size=2, volume=volume)
mock_query_snapshot_by_name.return_value = {'success': 1}
retval = self.fake_driver._check_snapshot_exist(snapshot)
self.assertEqual({'success': 1}, retval)

@mock.patch.object(sds_client.RestCmd, 'delete_snapshot')
@mock.patch.object(sds_driver.SdsBaseDriver, '_check_snapshot_exist')
def test_delete_snapshot(self, mock__check_snapshot_exist,
mock_delete_snapshot):
"""Test delete snapshot."""
volume = objects.Volume(_name_id=uuid.uuid4(), size=1)
snapshot = objects.Snapshot(
id=uuid.uuid4(), volume_size=2, volume=volume)
mock__check_snapshot_exist.return_value = True
mock_delete_snapshot.return_value = {'success': 1}
retval = self.fake_driver.delete_snapshot(snapshot)
self.assertIsNone(retval)


class FakeSdsISCSIDriver(sds_driver.SdsISCSIDriver):
"""Fake sds iscsi driver, include attach, detach."""

def __init__(self):
"""Init conf client pool."""
self.configuration = config.Configuration(None)
self.client = None
self.address = "192.168.200.100"
self.user = "fake_user"
self.password = "fake_password"
self.pool = "fake_pool_name"
self.poolid = 1
self.iscsi_info = {"iqn.1994-05.com.redhat:899c5f9d15d":
"1.1.1.1,1.1.1.2,1.1.1.3"}
self.default_target_ips = ["1.1.1.1", "1.1.1.2", "1.1.1.3"]
self.chap_username = "123456789123"
self.chap_password = "1234567891234"


@ddt.ddt
class TestSdsISCSIDriver(test.TestCase):
"""Testcase sds iscsi driver, include attach, detach."""

def setUp(self):
"""Setup."""
super(TestSdsISCSIDriver, self).setUp()
self.fake_driver = FakeSdsISCSIDriver()
self.fake_driver.client = sds_client.RestCmd("192.168.200.100",
"fake_user",
"fake_password",
True)

@mock.patch.object(sds_client.RestCmd, 'query_target_by_name')
def test__check_target_exist(self, mock_query_target_by_name):
"""Test target exist or not."""
target_name = 'test_driver'
mock_query_target_by_name.return_value = {'success': 1}
retval = self.fake_driver._check_target_exist(target_name)
self.assertEqual({'success': 1}, retval)

@mock.patch.object(sds_client.RestCmd, 'query_initiator_by_name')
def test__check_initiator_exist(self, mock_query_initiator_by_name):
"""Test initiator exist or not."""
initiator_name = 'test_driver'
mock_query_initiator_by_name.return_value = {'success': 1}
retval = self.fake_driver._check_initiator_exist(initiator_name)
self.assertEqual({'success': 1}, retval)

@mock.patch.object(sds_client.RestCmd, 'query_target_initiatoracl')
def test__check_target_added_initiator(self,
mock_query_target_initiatoracl):
"""Test target added the initiator."""
mock_query_target_initiatoracl.return_value = {'success': 1}
target_name, initiator_name = 'test_driver', 'initiator_name'
retval = self.fake_driver._check_target_added_initiator(target_name,
initiator_name)
self.assertEqual({'success': 1}, retval)

@mock.patch.object(sds_client.RestCmd, 'query_target_lunacl')
def test__check_target_added_lun(self, mock_query_target_lunacl):
"""Test target added the lun."""
mock_query_target_lunacl.return_value = {'success': 1}
target_name, pool_name, volume_name = ('ccc', self.fake_driver.pool,
'fcc')
retval = self.fake_driver._check_target_added_lun(target_name,
pool_name,
volume_name)
self.assertEqual({'success': 1}, retval)

@mock.patch.object(sds_client.RestCmd, 'query_chapinfo_by_target')
def test__check_target_added_chap(self, mock_query_chapinfo_by_target):
"""Test target added chapuser."""
mock_query_chapinfo_by_target.return_value = {'success': 1}
target_name, user_name = 'ccc', 'fcc'
retval = self.fake_driver._check_target_added_chap(target_name,
user_name)
self.assertEqual({'success': 1}, retval)

def test__get_target_ip(self):
"""Test get target from targetip."""
initiator = 'iqn.1994-05.com.redhat:899c5f9d15d'
retval_target_ips = \
self.fake_driver._get_target_ip(initiator)
self.assertListEqual(['1.1.1.1', '1.1.1.2', '1.1.1.3'],
retval_target_ips)

self.fake_driver.default_target_ips = \
["1.1.1.1"]
initiator = 'vms'
retval_target_ips = \
self.fake_driver._get_target_ip(initiator)
self.assertListEqual(["1.1.1.1"], retval_target_ips)

@mock.patch.object(sds_client.RestCmd, 'add_chap_by_target')
@mock.patch.object(sds_driver.SdsISCSIDriver, '_check_target_added_chap')
@mock.patch.object(sds_driver.SdsISCSIDriver, '_check_target_added_lun')
@mock.patch.object(sds_client.RestCmd, 'mapping_lun')
@mock.patch.object(sds_client.RestCmd, 'add_initiator_to_target')
@mock.patch.object(sds_driver.SdsISCSIDriver,
'_check_target_added_initiator')
@mock.patch.object(sds_client.RestCmd, 'create_initiator')
@mock.patch.object(sds_driver.SdsISCSIDriver, '_check_initiator_exist')
@mock.patch.object(sds_client.RestCmd, 'create_target')
@mock.patch.object(sds_client.RestCmd, 'query_node_by_targetips')
@mock.patch.object(sds_driver.SdsISCSIDriver, '_check_target_exist')
@mock.patch.object(sds_driver.SdsISCSIDriver, '_get_target_ip')
def test_initialize_connection(self, mock__get_target_ip,
mock__check_target_exist,
mock_query_node_by_targetips,
mock_create_target,
mock__check_initiator_exist,
mock_create_initiator,
mock__check_target_added_initiator,
mock_add_initiator_to_target,
mock_mapping_lun,
mock__check_target_added_lun,
mock__check_target_added_chap,
mock_add_chap_by_target):
"""Test attach volume to kvm."""
mock__get_target_ip.return_value = (['1.1.1.1', '1.1.1.2', '1.1.1.3'])
mock__check_target_exist.return_value = False
mock__check_initiator_exist.return_value = False
mock__check_target_added_initiator.return_value = False
mock__check_target_added_chap.return_value = False
mock_query_node_by_targetips.return_value = {'host_id', 'address'}
mock_create_target.return_value = {'success': 1}
mock_create_initiator.return_value = {'success': 1}
mock_add_initiator_to_target.return_value = {'success': 1}
mock_mapping_lun.return_value = {'success': 1}
mock__check_target_added_lun.return_value = 1
mock_add_chap_by_target.return_value = {'success': 1}

volume1, connector1 = (objects.Volume(id=uuid.uuid4(),
_name_id=uuid.uuid4(), size=1),
{'initiator':
'iqn.1994-05.com.redhat:899c5f9d15d',
'multipath': True})
initiator_name = connector1['initiator']
iqn_end = initiator_name.split(':', 1)[1]
target_head = 'iqn.2014-10.com.szsandstone:storage:'
target_name = target_head + iqn_end
result1 = {
'driver_volume_type': 'iscsi',
'data': {'target_discovered': True,
'target_portals': ['1.1.1.1:3260',
'1.1.1.2:3260',
'1.1.1.3:3260'],
'volume_id': volume1.id,
'auth_method': 'CHAP',
'auth_username': '123456789123',
'auth_password': '1234567891234',
'target_iqns': [target_name, target_name, target_name],
'target_luns': [1, 1, 1]}}
retval = self.fake_driver.initialize_connection(volume1, connector1)
self.assertDictEqual(result1, retval)

volume2, connector2 = (objects.Volume(id=uuid.uuid4(),
_name_id=uuid.uuid4(),
size=2),
{'initiator':
'iqn.1994-05.com.redhat:899c5f9d15d'})
mock__get_target_ip.return_value = (['1.1.1.1', '1.1.1.2', '1.1.1.3'])
initiator_name = connector2['initiator']
iqn_end = initiator_name.split(':', 1)[1]
target_head = 'iqn.2014-10.com.szsandstone:storage:'
target_name = target_head + iqn_end
result2 = {'driver_volume_type': 'iscsi',
'data': {'target_discovered': True,
'target_portal': '1.1.1.1:3260',
'volume_id': volume2.id,
'target_iqn': target_name,
'target_lun': 1,
'auth_method': 'CHAP',
'auth_username': '123456789123',
'auth_password': '1234567891234'}}
retval = self.fake_driver.initialize_connection(volume2, connector2)
self.assertDictEqual(result2, retval)

@mock.patch.object(sds_client.RestCmd, 'unmap_lun')
def test_terminate_connection(self, mock_unmap_lun):
"""Test detach volume from kvm."""
volume, connector = (objects.Volume(_name_id=uuid.uuid4(), size=1),
{'initiator':
'iqn.1994-05.com.redhat:899c5f9d15d'})
mock_unmap_lun.return_value = {'success': 1}
retval = self.fake_driver.terminate_connection(volume, connector)
self.assertIsNone(retval)
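
One detail worth pulling out of test_initialize_connection above: the expected target name is derived from the connector's initiator IQN. A standalone sketch of that derivation, mirroring the test's own arithmetic rather than the driver internals:

def build_expected_target_name(initiator_iqn):
    # Keep everything after the first ':' of the initiator IQN and prefix it
    # with the SandStone target namespace used in the test expectations.
    iqn_end = initiator_iqn.split(':', 1)[1]
    return 'iqn.2014-10.com.szsandstone:storage:' + iqn_end


assert (build_expected_target_name('iqn.1994-05.com.redhat:899c5f9d15d')
        == 'iqn.2014-10.com.szsandstone:storage:899c5f9d15d')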

cinder/tests/unit/volume/drivers/sandstone/test_utils.py (+54, -0)

@@ -0,0 +1,54 @@
# Copyright (c) 2019 ShenZhen SandStone Data Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import re

import requests


class FakeBaseSession(requests.Session):
"""Redefine get and post method, fake it."""

method_map = {}

def _get_response(self, method, url):
url_map = self.method_map.get(method, {})
tmp = None
data = {}
for k in url_map:
if re.search(k, url):
if not tmp or len(tmp) < len(k):
data = url_map[k]
tmp = k

resp_content = {'success': 1}
resp_content.update(data)
resp = requests.Response()
resp.cookies['XSRF-TOKEN'] = 'fake_token'
resp.headers['Referer'] = 'fake_refer'
resp.headers['Set-Cookie'] = 'sdsom_sessionid=fake_session;'
resp.status_code = 200
resp.encoding = 'utf-8'
resp._content = json.dumps(resp_content).encode('utf-8')

return resp

def get(self, url, **kwargs):
"""Redefine get method."""
return self._get_response('get', url)

def post(self, url, **kwargs):
"""Redefine post method."""
return self._get_response('post', url)
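
A short illustration of the longest-match lookup in _get_response above: when several method_map keys match a URL, the longest key wins, so a specific endpoint overrides a more general one. The subclass and endpoints below are made up for the example.

class DemoSession(FakeBaseSession):
    method_map = {
        'post': {
            'resource/target': {'data': {'hit': 'generic'}},
            'resource/target/list': {'data': {'hit': 'specific'}},
        }
    }


resp = DemoSession().post('https://fake/api/storage/resource/target/list')
assert resp.json() == {'success': 1, 'data': {'hit': 'specific'}}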

cinder/volume/drivers/sandstone/__init__.py (+0, -0)


cinder/volume/drivers/sandstone/constants.py (+21, -0)

@@ -0,0 +1,21 @@
# Copyright (c) 2019 ShenZhen SandStone Data Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SandStone iSCSI Driver Const."""

CONNECT_ERROR = 403

BASIC_URI = '/api/storage/'
OM_URI = '/api/om/'
PAGESIZE = 1000
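
For orientation, RestCmd.run() in sds_client.py below joins these prefixes with the endpoint path: storage-plane calls use BASIC_URI, while management calls such as user/login and delaytask/list use OM_URI (om_op_flag=True). A trivial illustration, using the address from the unit tests:

address = 'https://192.168.200.100'
print(address + BASIC_URI + 'pool/list')     # https://192.168.200.100/api/storage/pool/list
print(address + OM_URI + 'delaytask/list')   # https://192.168.200.100/api/om/delaytask/list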

cinder/volume/drivers/sandstone/sds_client.py (+711, -0)

@@ -0,0 +1,711 @@
# Copyright (c) 2019 ShenZhen SandStone Data Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SandStone iSCSI Driver."""

import hashlib
import json
import re
import time

from oslo_log import log as logging
import requests
import six

from cinder import exception
from cinder.i18n import _
from cinder.volume.drivers.sandstone import constants

LOG = logging.getLogger(__name__)


class RestCmd(object):
"""Restful api class."""

def __init__(self, address, user, password,
suppress_requests_ssl_warnings):
"""Init RestCmd class.

:param address: Restapi uri.
:param user: login web username.
:param password: login web password.
"""
self.address = "https://%(address)s" % {"address": address}
self.user = user
self.password = password
self.pagesize = constants.PAGESIZE
self.session = None
self.short_wait = 10
self.long_wait = 12000
self.debug = True
self._init_http_header()

def _init_http_header(self):
self.session = requests.Session()
self.session.headers.update({
"Content-Type": "application/json",
"Connection": "keep-alive",
"Accept-Encoding": "gzip, deflate",
})
self.session.verify = False

def run(self, url, method, data=None, json_flag=True,
filter_flag=False, om_op_flag=False):
"""Run rest cmd function.

:param url: rest api uri resource.
:param data: rest api uri json parameter.
:param filter_flag: controller whether filter log. (default 'No')
:param om_op_flag: api op have basic and om, use different prefix uri.
"""
kwargs = {}
if data:
kwargs["data"] = json.dumps(data)
if om_op_flag:
rest_url = self.address + constants.OM_URI + url
else:
rest_url = self.address + constants.BASIC_URI + url

func = getattr(self.session, method.lower())
try:
result = func(rest_url, **kwargs)
except requests.RequestException as err:
msg = _('Bad response from server: %(url)s. '
'Error: %(err)s') % {'url': rest_url, 'err': err}
raise exception.VolumeBackendAPIException(msg)

try:
result.raise_for_status()
except requests.HTTPError as exc:
if exc.response.status_code == constants.CONNECT_ERROR:
try:
self.login()
except requests.ConnectTimeout as err:
msg = (_("Sandstone web server may be abnormal "
"or storage may be poweroff. Error: %(err)s")
% {'err': err})
raise exception.VolumeBackendAPIException(msg)
else:
return {"error": {"code": exc.response.status_code,
"description": six.text_type(exc)}}

if not filter_flag:
LOG.info('''
Request URL: %(url)s,
Call Method: %(method)s,
Request Data: %(data)s,
Response Data: %(res)s,
Result Data: %(res_json)s.''', {'url': url, 'method': method,
'data': data, 'res': result,
'res_json': result.json()})

if json_flag:
return result.json()
return result

def _assert_restapi_result(self, result, err):
if result.get("success") != 1:
msg = (_('%(err)s\nresult:%(res)s') % {"err": err,
"res": result})
raise exception.VolumeBackendAPIException(data=msg)

def login(self):
"""Login web get with token session."""
url = 'user/login'

sha256 = hashlib.sha256()
sha256.update(self.password.encode("utf8"))
password = sha256.hexdigest()

data = {"username": self.user, "password": password}
result = self.run(url=url, data=data, method='POST', json_flag=False,
om_op_flag=True)
self._assert_restapi_result(result.json(), _('Login error.'))
cookies = result.cookies
set_cookie = result.headers['Set-Cookie']
self.session.headers['Cookie'] = ';'.join(
['XSRF-TOKEN={}'.format(cookies['XSRF-TOKEN']),
' username={}'.format(self.user),
' sdsom_sessionid={}'.format(self._find_sessionid(set_cookie))])
self.session.headers["Referer"] = self.address
self.session.headers["X-XSRF-TOKEN"] = cookies["XSRF-TOKEN"]

def _find_sessionid(self, headers):
sessionid = re.findall("sdsom_sessionid=(\\w+);", headers)
if sessionid:
return sessionid[0]
return ""

def _check_special_result(self, result, contain):
if result.get("success") == 0 and contain in result.get("data"):
return True

def logout(self):
"""Logout release resource."""
url = 'user/logout'
data = {"username": self.user}
result = self.run(url, 'POST', data=data,
om_op_flag=True)
self._assert_restapi_result(result, _("Logout out error."))

def query_capacity_info(self):
"""Query cluster capacity."""
url = 'capacity'
capacity_info = {}

result = self.run(url, 'POST', filter_flag=True)
self._assert_restapi_result(result, _("Query capacity error."))
capacity_info["capacity_bytes"] = result["data"].get(
"capacity_bytes", 0)
capacity_info["free_bytes"] = result["data"].get("free_bytes", 0)
return capacity_info

def query_pool_info(self):
"""Query use pool status."""
url = 'pool/list'

result = self.run(url, 'POST')
self._assert_restapi_result(result, _("Query pool status error."))
return result["data"]

def get_poolid_from_poolname(self):
"""Use poolname get poolid from pool/list maps."""
data = self.query_pool_info()
poolname_map_poolid = {}
if data:
for pool in data:
poolname_map_poolid[pool["realname"]] = pool["pool_id"]
return poolname_map_poolid

def create_initiator(self, initiator_name):
"""Create client iqn in storage cluster."""
url = 'resource/initiator/create'
data = {"iqn": initiator_name, "type": "iSCSI",
"remark": "Cinder iSCSI"}
result = self.run(url, 'POST', data=data)
# initiator already exists, return no error.
if self._check_special_result(result, "already exist"):
return
self._assert_restapi_result(result, _("Create initiator error."))

def _delaytask_list(self, pagesize=20):
url = 'delaytask/list'
data = {"pageno": 1, "pagesize": pagesize}
return self.run(url, 'POST', data=data, om_op_flag=True)

def _judge_delaytask_status(self, wait_time, func_name, *args):
# poll the task once per second, up to wait_time seconds
func = getattr(self, func_name.lower())
for wait in range(1, wait_time + 1):
try:
task_status = func(*args)
if self.debug:
LOG.info(task_status)
except exception.VolumeBackendAPIException as exc:
msg = (_("Task: run %(task)s failed, "
"err: %(err)s.")
% {"task": func_name,
"err": exc})
LOG.error(msg)
if task_status.get('run_status') == "failed":
msg = (_("Task : run %(task)s failed, "
"parameter : %(parameter)s, "
"progress is %(process)d.")
% {"task": func_name,
"process": task_status.get('progress'),
"parameter": args})
raise exception.VolumeBackendAPIException(data=msg)
elif task_status.get('run_status') != "completed":
msg = (_("Task : running %(task)s , "
"parameter : %(parameter)s, "
"progress is %(process)d, "
"waited for 1 second, "
"total waited %(total)d second.")
% {"task": func_name,
"process": task_status.get('progress', 0),
"parameter": args,
"total": wait})
LOG.info(msg)
time.sleep(1)
elif task_status.get('run_status') == "completed":
msg = (_("Task : running %(task)s successfully, "
"parameter : %(parameter)s, "
"progress is %(process)d, "
"total spend %(total)d second.")
% {"task": func_name,
"process": task_status.get('progress'),
"parameter": args,
"total": wait})
LOG.info(msg)
break
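
# The mutating calls below pass this helper the name of one of the
# query_*_process methods plus its arguments, e.g.
#     self._judge_delaytask_status(self.short_wait,
#                                  "query_map_initiator_porcess",
#                                  target_name, initiator_name)
# and it polls that method roughly once per second for at most wait_time
# iterations (short_wait is 10 and long_wait 12000, both set in __init__).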

def add_initiator_to_target(self, target_name, initiator_name):
"""Bind client iqn to storage target iqn."""
url = 'resource/target/add_initiator_to_target'
data = {"targetName": target_name,
"iqns": [{"ip": "", "iqn": initiator_name}]}
result = self.run(url, 'POST', data=data)
# wait 10 seconds to map initiator
self._judge_delaytask_status(self.short_wait,
"query_map_initiator_porcess",
target_name, initiator_name)
self._assert_restapi_result(result, _("Add initiator "
"to target error."))

def query_map_initiator_porcess(self, target_name,
initiator_name):
"""Query initiator add to target process."""
result = self._delaytask_list()
self._assert_restapi_result(result, _("Query mapping "
"initiator process error."))

result = result["data"].get("results", None) or []
expected_parameter = [{"target_name": target_name,
"iqns": [{"ip": "", "iqn": initiator_name}]}]
task = [map_initiator_task for map_initiator_task in result
if map_initiator_task["executor"] == "MapInitiator"
and map_initiator_task["parameter"] == expected_parameter]
if task:
return task[0]
return {}

def query_initiator_by_name(self, initiator_name):
"""Query initiator exist or not."""
url = 'resource/initiator/list'
data = {"initiatorMark": "", "pageno": 1,
"pagesize": self.pagesize, "type": "iSCSI"}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Query initiator "
"by name error."))

result = result["data"].get("results", None) or []
initiator_info = [initiator for initiator in result
if initiator.get("iqn", None) == initiator_name]
if initiator_info:
return initiator_info[0]
return None

def query_target_initiatoracl(self, target_name, initiator_name):
"""Query target iqn bind client iqn info."""
url = 'resource/target/get_target_acl_list'
data = {"pageno": 1, "pagesize": self.pagesize,
"targetName": target_name}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Query target "
"initiatoracl error."))

results = result["data"].get("results", None)
acl_info = [acl for acl in results or []
if acl.get("name", None) == initiator_name]
return acl_info or None

def query_node_by_targetips(self, target_ips):
"""Query target ip relation with node."""
url = 'block/gateway/server/list'
result = self.run(url, 'POST')
self._assert_restapi_result(result, _("Query node by "
"targetips error."))

targetip_to_hostid = {}
for node in result["data"]:
for node_access_ip in node.get("networks"):
goal_ip = node_access_ip.get("address")
if goal_ip in target_ips:
targetip_to_hostid[goal_ip] =\
node_access_ip.get("hostid", None)
return targetip_to_hostid

def query_target_by_name(self, target_name):
"""Query target iqn exist or not."""
url = 'resource/target/list'
data = {"pageno": 1, "pagesize": self.pagesize,
"thirdParty": [0, 1], "targetMark": ""}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Query target by name error."))

result = result["data"].get("results", None) or []
target_info = [target for target in result
if target.get("name", None) == target_name]
if target_info:
return target_info[0]
return None

def create_target(self, target_name, targetip_to_hostid):
"""Create target iqn."""
url = 'resource/target/create'
data = {"type": "iSCSI", "readOnly": 0,
"thirdParty": 1, "targetName": target_name,
"networks": [{"hostid": host_id, "address": address}
for address, host_id in
targetip_to_hostid.items()]}
result = self.run(url, 'POST', data=data)
# target already exists, return no error.
if self._check_special_result(result, "already exist"):
return
self._assert_restapi_result(result, _("Create target error."))

def add_chap_by_target(self, target_name, username, password):
"""Add chap to target, only support forward."""
url = 'resource/target/add_chap'
data = {"password": password,
"user": username, "targetName": target_name}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Add chap by target error."))

def query_chapinfo_by_target(self, target_name, username):
"""Query chapinfo by target, check chap add target or not."""
url = 'resource/target/get_chap_list'
data = {"targetName": target_name}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Query chapinfo "
"by target error."))

result = result.get('data') or []
chapinfo = [c for c in result if c.get("user") == username]
if chapinfo:
return chapinfo[0]
return None

def create_lun(self, capacity_bytes, poolid, volume_name):
"""Create lun resource."""
url = 'resource/lun/add'
data = {"capacity_bytes": capacity_bytes,
"poolId": poolid, "priority": "normal",
"qosSettings": {}, "volumeName": volume_name}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Create lun error."))

def delete_lun(self, poolid, volume_name):
"""Delete lun resource."""
url = 'resource/lun/batch_delete'
data = {"delayTime": 0, "volumeNameList": [{
"poolId": poolid,
"volumeName": volume_name}]}
result = self.run(url, 'POST', data=data)
# lun already deleted, return no error.
if self._check_special_result(result, "not found"):
return
self._assert_restapi_result(result, _("Delete lun error."))

def extend_lun(self, capacity_bytes, poolid, volume_name):
"""Extend lun, only support enlarge."""
url = 'resource/lun/resize'

data = {"capacity_bytes": capacity_bytes,
"poolId": poolid,
"volumeName": volume_name}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Extend lun error."))

def unmap_lun(self, target_name, poolid, volume_name, pool_name):
"""Unbind lun from target iqn."""
url = 'resource/target/unmap_luns'
volume_info = self.query_lun_by_name(volume_name, poolid)
result = {"success": 0}
if volume_info:
uuid = volume_info.get("uuid", None)
data = {"targetName": target_name,
"targetLunList": [uuid],
"targetSnapList": []}
result = self.run(url, 'POST', data=data)
# lun already unmapped, return no error.
if self._check_special_result(result, "not mapped"):
return
# wait for 10 seconds to unmap lun.
self._judge_delaytask_status(self.short_wait,
"query_unmapping_lun_porcess",
target_name, volume_name,
uuid, pool_name)
self._assert_restapi_result(result, _("Unmap lun error."))
else:
self._assert_restapi_result(result,
_("Unmap lun error, uuid is None."))

def mapping_lun(self, target_name, poolid, volume_name, pool_name):
"""Bind lun to target iqn."""
url = 'resource/target/map_luns'
volume_info = self.query_lun_by_name(volume_name, poolid)
result = {"success": 0}
if volume_info:
uuid = volume_info.get("uuid", None)
data = {"targetName": target_name,
"targetLunList": [uuid],
"targetSnapList": []}
result = self.run(url, 'POST', data=data)
# lun already mapped, return no error.
if self._check_special_result(result, "already mapped"):
return
# wait for 10 seconds to map lun.
self._judge_delaytask_status(self.short_wait,
"query_mapping_lun_porcess",
target_name, volume_name,
uuid, pool_name)
self._assert_restapi_result(result, _("Map lun error."))
else:
self._assert_restapi_result(result,
_("Map lun error, uuid is None."))

def query_mapping_lun_porcess(self, target_name, volume_name,
uuid, pool_name):
"""Query mapping lun process."""
result = self._delaytask_list()
self._assert_restapi_result(result, _("Query mapping "
"lun process error."))

expected_parameter = {"target_name": target_name,
"image_id": uuid,
"target_realname": target_name,
"meta_pool": pool_name,
"image_realname": volume_name}
result = result["data"].get("results", None) or []
task = [map_initiator_task for map_initiator_task in result
if map_initiator_task["executor"] == "TargetMap"
and map_initiator_task["parameter"] == expected_parameter]
if task:
return task[0]
return {}

def query_unmapping_lun_porcess(self, target_name, volume_name,
uuid, pool_name):
"""Query mapping lun process."""
result = self._delaytask_list()
self._assert_restapi_result(result, _("Query mapping "
"lun process error."))

expected_parameter = {"target_name": target_name,
"image_id": uuid,
"target_realname": target_name,
"meta_pool": pool_name,
"image_name": volume_name}
result = result["data"].get("results", None) or []
task = [map_initiator_task for map_initiator_task in result
if map_initiator_task["executor"] == "TargetUnmap"
and map_initiator_task["parameter"] == expected_parameter]
if task:
return task[0]
return {}

def query_target_lunacl(self, target_name, poolid, volume_name):
"""Query target iqn relation with lun."""
url = 'resource/target/get_luns'
data = {"pageno": 1, "pagesize": self.pagesize,
"pools": [poolid], "targetName": target_name}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Query target lunacl error."))

# target get_luns returns its data under 'results'
result = result["data"].get("results", None) or []
lunid = [volume.get("lid", None) for volume in result
if volume.get("name", None) == volume_name
and volume.get("pool_id") == poolid]
if lunid:
return lunid[0]
return None

def query_lun_by_name(self, volume_name, poolid):
"""Query lun exist or not."""
url = 'resource/lun/list'
data = {"pageno": 1, "pagesize": self.pagesize,
"volumeMark": volume_name,
"sortType": "time", "sortOrder": "desc",
"pools": [poolid], "thirdParty": [0, 1]}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Query lun by name error."))

result = result["data"].get("results", None) or []
volume_info = [volume for volume in result
if volume.get("volumeName", None) == volume_name]
if volume_info:
return volume_info[0]
return None

def query_target_by_lun(self, volume_name, poolid):
"""Query lun already mapped target name."""
url = "resource/lun/targets"
data = {"poolId": poolid, "volumeName": volume_name}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Query target by lun error."))

data = result["data"]
target_name = data[0].get("name", None)
return target_name

def create_snapshot(self, poolid, volume_name, snapshot_name):
"""Create lun snapshot."""
url = 'resource/snapshot/add'
data = {"lunName": volume_name,
"poolId": poolid,
"remark": "Cinder iSCSI snapshot.",
"snapName": snapshot_name}
result = self.run(url, 'POST', data=data)
        # snapshot already exists, return no error.
if self._check_special_result(result, "has exists"):
return
# wait for 10 seconds to create snapshot
self._judge_delaytask_status(self.short_wait,
"query_create_snapshot_process",
poolid, volume_name, snapshot_name)
self._assert_restapi_result(result, _("Create snapshot error."))

def query_create_snapshot_process(self, poolid,
volume_name, snapshot_name):
"""Query create snapshot process."""
result = self._delaytask_list()
self._assert_restapi_result(result, _("Query flatten "
"lun process error."))
result = result["data"].get("results", None) or []
task = [flatten_task for flatten_task in result
if flatten_task["executor"] == "SnapCreate"
and flatten_task["parameter"].get("pool_id", None)
== poolid
and flatten_task["parameter"].get("snap_name", None)
== snapshot_name
and flatten_task["parameter"].get("lun_name", None)
== volume_name]

if task:
return task[0]
return {}

def delete_snapshot(self, poolid, volume_name, snapshot_name):
"""Delete lun snapshot."""
url = 'resource/snapshot/delete'
data = {"lunName": volume_name,
"poolId": poolid, "snapName": snapshot_name}
result = self.run(url, 'POST', data=data)
        # snapshot already deleted, return no error.
if self._check_special_result(result, "not found"):
return
# wait for 10 seconds to delete snapshot
self._judge_delaytask_status(self.short_wait,
"query_delete_snapshot_process",
poolid, volume_name, snapshot_name)
self._assert_restapi_result(result, _("Delete snapshot error."))

def query_delete_snapshot_process(self, poolid,
volume_name, snapshot_name):
"""Query delete snapshot process."""
result = self._delaytask_list()
self._assert_restapi_result(result, _("Query delete "
"snapshot process error."))
result = result["data"].get("results", None) or []
task = [flatten_task for flatten_task in result
if flatten_task["executor"] == "SnapDelete"
and flatten_task["parameter"].get("pool_id", None)
== poolid
and flatten_task["parameter"].get("snap_name", None)
== snapshot_name
and flatten_task["parameter"].get("lun_name", None)
== volume_name]

if task:
return task[0]
return {}

def create_lun_from_snapshot(self, snapshot_name, src_volume_name,
poolid, dst_volume_name):
"""Create lun from source lun snapshot."""
url = 'resource/snapshot/clone'
data = {"snapshot": {"poolId": poolid,
"lunName": src_volume_name,
"snapName": snapshot_name},
"cloneLun": {"lunName": dst_volume_name,
"poolId": poolid}}
result = self.run(url, 'POST', data=data)
        # clone volume already exists, return no error.
if self._check_special_result(result, "already exists"):
return
# wait for 10 seconds to clone lun
self._judge_delaytask_status(self.short_wait,
"query_clone_lun_process",
poolid, src_volume_name, snapshot_name)
self._assert_restapi_result(result, _("Create lun "
"from snapshot error."))
self.flatten_lun(dst_volume_name, poolid)

def query_clone_lun_process(self, poolid, volume_name, snapshot_name):
"""Query clone lun process."""
result = self._delaytask_list()
self._assert_restapi_result(result, _("Query flatten "
"lun process error."))
result = result["data"].get("results", None) or []
task = [flatten_task for flatten_task in result
if flatten_task["executor"] == "SnapClone"
and flatten_task["parameter"].get("pool_id", None)
== poolid
and flatten_task["parameter"].get("snap_name", None)
== snapshot_name
and flatten_task["parameter"].get("lun_name", None)
== volume_name]

if task:
return task[0]
return {}

def flatten_lun(self, volume_name, poolid):
"""Flatten lun."""
url = 'resource/lun/flatten'
data = {"poolId": poolid,
"volumeName": volume_name}
result = self.run(url, 'POST', data=data)
        # volume does not need flattening, return no error.
if self._check_special_result(result, "not need flatten"):
return
        # wait up to 200 minutes for the flatten to finish
self._judge_delaytask_status(self.long_wait,
"query_flatten_lun_process",
poolid, volume_name)
self._assert_restapi_result(result, _("Flatten lun error."))

def query_flatten_lun_process(self, poolid, volume_name):
"""Query flatten lun process."""
result = self._delaytask_list()
self._assert_restapi_result(result, _("Query flatten "
"lun process error."))
result = result["data"].get("results", None) or []
task = [flatten_task for flatten_task in result
if flatten_task["executor"] == "LunFlatten"
and flatten_task["parameter"].get("pool_id", None)
== poolid
and flatten_task["parameter"].get("lun_name", None)
== volume_name]
if task:
return task[0]
return {}
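
    # The query_*_process helpers above all follow the same pattern: list the
    # cluster's delay tasks and pick the entry whose "executor" and
    # "parameter" identify the operation just submitted.  A matched entry is
    # assumed to look roughly like the following (values illustrative only):
    #
    #     {"executor": "LunFlatten",
    #      "parameter": {"pool_id": 3, "lun_name": "volume-1234"},
    #      "progress": 42}
    #
    # An empty dict is returned when no matching task is found, which
    # _judge_delaytask_status() (defined earlier in this file) presumably
    # treats as the task having finished or never been queued.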

def create_lun_from_lun(self, dst_volume_name, poolid, src_volume_name):
"""Clone lun from source lun."""
        tmp_snapshot_name = ('temp' + src_volume_name + 'clone'
                             + dst_volume_name)
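        # Worked example: with src_volume_name "volume-src" and
        # dst_volume_name "volume-dst", the temporary snapshot is named
        # "tempvolume-srcclonevolume-dst"; it is deleted again below once the
        # clone has been flattened.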
self.create_snapshot(poolid, src_volume_name, tmp_snapshot_name)
self.create_lun_from_snapshot(tmp_snapshot_name, src_volume_name,
poolid, dst_volume_name)
self.flatten_lun(dst_volume_name, poolid)

self.delete_snapshot(poolid, src_volume_name, tmp_snapshot_name)

def query_snapshot_by_name(self, volume_name, poolid, snapshot_name):
"""Query snapshot exist or not."""
url = 'resource/snapshot/list'
data = {"lunName": volume_name, "pageno": 1,
"pagesize": self.pagesize, "poolId": poolid,
"snapMark": ""}
result = self.run(url, 'POST', data=data)
self._assert_restapi_result(result, _("Query snapshot by name error."))

result = result["data"].get("results", None) or []
snapshot_info = [snapshot for snapshot in result
if snapshot.get("snapName", None) ==
snapshot_name]
return snapshot_info

+ 513
- 0
cinder/volume/drivers/sandstone/sds_driver.py View File

@@ -0,0 +1,513 @@
# Copyright (c) 2019 ShenZhen SandStone Data Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Volume Drivers for SandStone distributed storage."""

from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import units

from cinder import exception
from cinder.i18n import _
from cinder import interface
from cinder.volume import driver
from cinder.volume.drivers.san import san
from cinder.volume.drivers.sandstone.sds_client import RestCmd

LOG = logging.getLogger(__name__)

sds_opts = [
cfg.ListOpt("default_sandstone_target_ips",
default=[],
help="SandStone default target ip."),
cfg.StrOpt("sandstone_pool",
default="",
help="SandStone storage pool resource name."),
cfg.DictOpt("initiator_assign_sandstone_target_ip",
default={},
help="Support initiator assign target with assign ip.")
]

CONF = cfg.CONF
CONF.register_opts(sds_opts)
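
# For reference, a hypothetical cinder.conf backend stanza wiring these
# options up might look like the following.  The backend name, addresses,
# credentials and the iSCSI driver class name are illustrative assumptions,
# not values defined in this module:
#
#     [sandstone-iscsi]
#     volume_backend_name = sandstone-iscsi
#     volume_driver = cinder.volume.drivers.sandstone.sds_driver.SdsISCSIDriver
#     san_ip = 192.0.2.10
#     san_login = admin
#     san_password = <password>
#     sandstone_pool = <pool name>
#     default_sandstone_target_ips = 192.0.2.11,192.0.2.12
#     # DictOpt; assumed to map an initiator IQN to its dedicated target IP.
#     initiator_assign_sandstone_target_ip = <initiator iqn>:192.0.2.13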


class SdsBaseDriver(driver.VolumeDriver):
"""ISCSIDriver base class."""

    # ThirdPartySystems wiki page
VERSION = '1.0'
CI_WIKI_NAME = "SandStone_Storage_CI"

def __init__(self, *args, **kwargs):
"""Init configuration."""
super(SdsBaseDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(sds_opts)
self.configuration.append_config_values(san.san_opts)

def do_setup(self, context):
"""Instantiate common class and login storage system."""
if not self.configuration:
msg = _('Configuration is not found.')
raise exception.InvalidConfigurationValue(msg)
self.address = self.configuration.san_ip
self.user = self.configuration.san_login
self.password = self.configuration.san_password
self.pool = self.configuration.sandstone_pool
self.iscsi_info = (self.configuration.
initiator_assign_sandstone_target_ip)
self.default_target_ips = (self.configuration.
default_sandstone_target_ips)
self.chap_username = self.configuration.chap_username
self.chap_password = self.configuration.chap_password
self.suppress_requests_ssl_warnings = (self.configuration.
suppress_requests_ssl_warnings)
self.client = RestCmd(self.address, self.user, self.password,
self.suppress_requests_ssl_warnings)
LOG.debug("Run sandstone driver setup.")

def check_for_setup_error(self):
"""Check pool status and exist or not."""
self.client.login()
self.poolname_map_poolid = self.client.get_poolid_from_poolname()
all_pools = self.client.query_pool_info()
all_pools_name = [p['pool_name'] for p in all_pools
if p.get('pool_name')]

if self.pool not in all_pools_name:
msg = _('Storage pool %(pool)s does not exist '
'in the cluster.') % {'pool': self.pool}
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
pool_status = [p['status'] for p in all_pools
if p.get('pool_name') == self.pool]
if pool_status:
if ("health" not in pool_status[0].get('state') and
pool_status[0].get("progress", 0) != 100):
LOG.warning('Storage pool: %(poolName)s not healthy.',
{"poolName": self.pool})
if not self.poolname_map_poolid:
err_msg = _('poolname_map_poolid info is empty.')
self._raise_exception(err_msg)
self.poolid = self.poolname_map_poolid.get(self.pool)
if not self.poolid:
err_msg = _('poolid is None.')
self._raise_exception(err_msg)
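
        # For illustration (a hypothetical cluster, not data from this patch):
        # with a single pool named "sandstone_pool1" whose id is 3,
        # poolname_map_poolid would be {"sandstone_pool1": 3} and self.poolid
        # would resolve to 3.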

def _update_volume_stats(self, pool_name):
"""Get cluster capability and capacity."""
data, pool = {}, {}
data['pools'] = []

cluster_capacity = self.client.query_capacity_info()
total_capacity_gb = (float(cluster_capacity.get("capacity_bytes", 0))
/ units.Gi)
free_capacity_gb = (float(cluster_capacity.get("free_bytes", 0))
/ units.Gi)

        pool.update(dict(
            pool_name=pool_name,
            vendor_name='SandStone USP',
            driver_version=self.VERSION,
            total_capacity_gb=total_capacity_gb,
            free_capacity_gb=free_capacity_gb,
            QoS_support=True,
            thin_provisioning_support=True,
            multiattach=False,
        ))
        data['pools'].append(pool)
        # dict.update() returns None, so cache the assembled dict itself for
        # get_volume_stats(refresh=False).
        self._stats = data
        return data

def get_volume_stats(self, refresh=False):
"""Get volume status and reload sandstone config file."""
if refresh:
return self._update_volume_stats(self.pool)
return self._stats
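
    # For illustration, the dict assembled by _update_volume_stats() and
    # handed to the scheduler is expected to look roughly like this
    # (pool name and capacity figures are made up):
    #
    #     {"pools": [{"pool_name": "sandstone_pool1",
    #                 "vendor_name": "SandStone USP",
    #                 "driver_version": "1.0",
    #                 "total_capacity_gb": 100.0,
    #                 "free_capacity_gb": 80.0,
    #                 "QoS_support": True,
    #                 "thin_provisioning_support": True,
    #                 "multiattach": False}]}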

def _raise_exception(self, msg):
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)

def create_volume(self, volume):
"""Create a volume."""
capacity_bytes = int(volume.size) * units.Gi
self.client.create_lun(capacity_bytes, self.poolid, volume.name)

def delete_volume(self, volume):
"""Delete a volume."""
LOG.debug("Delete volume %(volumeName)s from pool %(poolId)s",
{"volumeName": volume.name,
"poolId": self.poolid})
self.client.delete_lun(self.poolid, volume.name)

def migrate_volume(self, ctxt, volume, host, new_type=None):
"""Migrate a volume within the same array."""
return (False, None)

def create_volume_from_snapshot(self, volume, snapshot):
"""Create a volume from a snapshot.

        The new volume is cloned from the snapshot and then flattened,
        so the time needed grows with the volume size.
"""
if snapshot.volume:
source_vol_name = snapshot.volume.name
source_vol_size = snapshot.volume.size * units.Gi
destination_vol_name = volume.name
destination_vol_size = volume.size * units.Gi
snapshot_name = snapshot.name

self.client.create_lun_from_snapshot(snapshot_name,
source_vol_name,
self.poolid,
destination_vol_name)
if destination_vol_size > source_vol_size:
self.client.extend_lun(destination_vol_size,
self.poolid, volume.name)
else:
            err_msg = _('Snapshot does not have a source volume.')
self._raise_exception(err_msg)

def create_cloned_volume(self, dst_volume, src_volume):
"""Clone a new volume from an existing volume."""
if not self._check_volume_exist(src_volume.name):
msg = (_('Source volume: %(volume_name)s does not exist.')
% {'volume_name': src_volume.name})
self._raise_exception(msg)
self.client.create_lun_from_lun(dst_volume.name, self.poolid,
src_volume.name)
dst_vol_size = dst_volume.size * units.Gi
src_vol_size = src_volume.size * units.Gi
if dst_vol_size > src_vol_size:
self.client.extend_lun(dst_vol_size, self.poolid, dst_volume.name)

def _check_volume_exist(self, volume):
return self.client.query_lun_by_name(volume, self.poolid)

def extend_volume(self, volume, new_size):
"""Extend a volume."""
old_volume = self._check_volume_exist(volume.name)
if not old_volume:
            msg = (_('Volume does not exist: %(volumeName)s.')
                   % {"volumeName": volume.name})
self._raise_exception(msg)

old_size = old_volume.get("capacity_bytes")
new_size = new_size * units.Gi
if new_size == old_size:
LOG.info("New size is equal to the real size from backend "
"storage, no need to extend. "
"realsize: %(oldsize)s, newsize: %(newsize)s.",
{"oldsize": old_size,
"newsize": new_size})
return

if new_size < old_size:
msg = (_("New size should be bigger than the real size from "
"backend storage. "
"realsize: %(oldsize)s, newsize: %(newsize)s.")
% {"oldsize": old_size,
"newsize": new_size})
self._raise_exception(msg)

LOG.info(
'Extend volume: %(volumename)s, '
'oldsize: %(oldsize)s, newsize: %(newsize)s.',
{"volumename": volume.name,
"oldsize": old_size,
"newsize": new_size})
self.client.extend_lun(new_size, self.poolid, volume.name)

def create_snapshot(self, snapshot):
"""Create snapshot from volume."""
volume = snapshot.volume

if not volume:
msg = (_("Can't get volume id from snapshot, snapshot: %(id)s.")
% {"id": snapshot.id})
self._raise_exception(msg)

LOG.debug(
"create snapshot from volumeName: %(volume)s, "
"snap name: %(snapshot)s.",
{"snapshot": snapshot.name,
"volume": volume.name},)
self.client.create_snapshot(self.poolid,