Merge "StorPool: Do not use packages storpool
and storpool.spopenstack
"
This commit is contained in:
commit
0eb032fe43
@ -18,18 +18,14 @@ import pathlib
|
||||
import time
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import importutils
|
||||
|
||||
from os_brick import exception
|
||||
from os_brick.initiator.connectors import base
|
||||
from os_brick.initiator import storpool_utils
|
||||
from os_brick import utils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
spopenstack = importutils.try_import('storpool.spopenstack')
|
||||
spapi = importutils.try_import('storpool.spapi')
|
||||
|
||||
|
||||
DEV_STORPOOL = pathlib.Path('/dev/storpool')
|
||||
DEV_STORPOOL_BYID = pathlib.Path('/dev/storpool-byid')
|
||||
|
||||
@ -55,21 +51,19 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
super(StorPoolConnector, self).__init__(root_helper, driver=driver,
|
||||
*args, **kwargs)
|
||||
|
||||
if spapi is None:
|
||||
raise exception.BrickException(
|
||||
'Could not import the StorPool API bindings')
|
||||
|
||||
if spopenstack is None:
|
||||
raise exception.BrickException(
|
||||
'Could not import the required module "storpool.spopenstack"')
|
||||
|
||||
try:
|
||||
self._attach = spopenstack.AttachDB(log=LOG)
|
||||
self._config = storpool_utils.get_conf()
|
||||
self._sp_api = storpool_utils.StorPoolAPI(
|
||||
self._config["SP_API_HTTP_HOST"],
|
||||
self._config["SP_API_HTTP_PORT"],
|
||||
self._config["SP_AUTH_TOKEN"])
|
||||
self._volume_prefix = self._config.get(
|
||||
"SP_OPENSTACK_VOLUME_PREFIX", "os")
|
||||
except Exception as e:
|
||||
raise exception.BrickException(
|
||||
'Could not initialize the StorPool API bindings: %s' % (e))
|
||||
'Could not initialize the StorPool API: %s' % (e))
|
||||
|
||||
if "SP_OURID" not in self._attach.config():
|
||||
if "SP_OURID" not in self._config:
|
||||
raise exception.BrickException(
|
||||
'Could not read "SP_OURID" from the StorPool configuration"')
|
||||
|
||||
@ -84,7 +78,7 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
while True:
|
||||
try:
|
||||
force = count == 0
|
||||
self._attach.api().volumesReassignWait(
|
||||
self._sp_api.volumes_reassign_wait(
|
||||
{
|
||||
"reassign": [{
|
||||
"volume": volume,
|
||||
@ -94,7 +88,7 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
}
|
||||
)
|
||||
break
|
||||
except spapi.ApiError as exc:
|
||||
except storpool_utils.StorPoolAPIError as exc:
|
||||
if (
|
||||
exc.name in ("busy", "invalidParam")
|
||||
and "is open at" in exc.desc
|
||||
@ -130,19 +124,20 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
if volume_id is None:
|
||||
raise exception.BrickException(
|
||||
'Invalid StorPool connection data, no volume ID specified.')
|
||||
volume = self._attach.volumeName(volume_id)
|
||||
volume = storpool_utils.os_to_sp_volume_name(
|
||||
self._volume_prefix, volume_id)
|
||||
mode = connection_properties.get('access_mode', None)
|
||||
if mode is None or mode not in ('rw', 'ro'):
|
||||
raise exception.BrickException(
|
||||
'Invalid access_mode specified in the connection data.')
|
||||
try:
|
||||
sp_ourid = self._attach.config()["SP_OURID"]
|
||||
sp_ourid = self._config["SP_OURID"]
|
||||
except KeyError:
|
||||
raise exception.BrickException(
|
||||
'SP_OURID missing, cannot connect volume %s' % volume_id)
|
||||
|
||||
try:
|
||||
self._attach.api().volumesReassignWait(
|
||||
self._sp_api.volumes_reassign_wait(
|
||||
{"reassign": [{"volume": volume, mode: [sp_ourid]}]})
|
||||
except Exception as exc:
|
||||
raise exception.BrickException(
|
||||
@ -150,13 +145,13 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
'failed: %s' % (exc)) from exc
|
||||
|
||||
try:
|
||||
volume_info = self._attach.api().volumeInfo(volume)
|
||||
volume_info = self._sp_api.volume_get_info(volume)
|
||||
sp_global_id = volume_info['globalId']
|
||||
except Exception as exc:
|
||||
raise exception.BrickException(
|
||||
'Communication with the StorPool API '
|
||||
'failed: %s' % (exc)) from exc
|
||||
|
||||
sp_global_id = volume_info.globalId
|
||||
return {'type': 'block',
|
||||
'path': str(DEV_STORPOOL_BYID) + '/' + sp_global_id}
|
||||
|
||||
@ -203,7 +198,7 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
'Invalid StorPool connection data, no device_path specified.')
|
||||
volume_name = path_to_volname(pathlib.Path(device_path))
|
||||
try:
|
||||
sp_ourid = self._attach.config()["SP_OURID"]
|
||||
sp_ourid = self._config["SP_OURID"]
|
||||
except KeyError:
|
||||
raise exception.BrickException(
|
||||
'SP_OURID missing, cannot disconnect volume %s' % volume_name)
|
||||
@ -234,7 +229,8 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
if volume_id is None:
|
||||
raise exception.BrickException(
|
||||
'Invalid StorPool connection data, no volume ID specified.')
|
||||
volume = self._attach.volumeName(volume_id)
|
||||
volume = storpool_utils.os_to_sp_volume_name(
|
||||
self._volume_prefix, volume_id)
|
||||
path = '/dev/storpool/' + volume
|
||||
dpath = connection_properties.get('device_path', None)
|
||||
if dpath is not None and dpath != path:
|
||||
@ -262,7 +258,7 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
:type connection_properties: dict
|
||||
"""
|
||||
names = []
|
||||
prefix = self._attach.volumeName('')
|
||||
prefix = storpool_utils.os_to_sp_volume_name(self._volume_prefix, '')
|
||||
prefixlen = len(prefix)
|
||||
if os.path.isdir('/dev/storpool'):
|
||||
files = os.listdir('/dev/storpool')
|
||||
@ -306,18 +302,19 @@ class StorPoolConnector(base.BaseLinuxConnector):
|
||||
'Invalid StorPool connection data, no volume ID specified.')
|
||||
|
||||
# Get the expected (new) size from the StorPool API
|
||||
volume = self._attach.volumeName(volume_id)
|
||||
volume = storpool_utils.os_to_sp_volume_name(
|
||||
self._volume_prefix, volume_id)
|
||||
LOG.debug('Querying the StorPool API for the size of %(vol)s',
|
||||
{'vol': volume})
|
||||
vdata = self._attach.api().volumeList(volume)[0]
|
||||
LOG.debug('Got size %(size)d', {'size': vdata.size})
|
||||
vdata = self._sp_api.volume(volume)[0]
|
||||
LOG.debug('Got size %(size)d', {'size': vdata['size']})
|
||||
|
||||
# Wait for the StorPool client to update the size of the local device
|
||||
path = '/dev/storpool/' + volume
|
||||
for _ in range(10):
|
||||
for _num in range(10):
|
||||
size = utils.get_device_size(self, path)
|
||||
LOG.debug('Got local size %(size)d', {'size': size})
|
||||
if size == vdata.size:
|
||||
if size == vdata['size']:
|
||||
return size
|
||||
time.sleep(0.1)
|
||||
else:
|
||||
|
213
os_brick/initiator/storpool_utils.py
Normal file
213
os_brick/initiator/storpool_utils.py
Normal file
@ -0,0 +1,213 @@
|
||||
# Copyright (c) 2015 - 2024 StorPool
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import configparser
|
||||
import errno
|
||||
import http.client
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import socket
|
||||
import time
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from os_brick import exception
|
||||
from os_brick.i18n import _
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
DEV_STORPOOL = pathlib.Path('/dev/storpool')
|
||||
DEV_STORPOOL_BYID = pathlib.Path('/dev/storpool-byid')
|
||||
|
||||
|
||||
STORPOOL_CONF_DEFAULTS = {
|
||||
"SP_API_HTTP_HOST": "127.0.0.1",
|
||||
"SP_API_HTTP_PORT": "81",
|
||||
}
|
||||
|
||||
|
||||
ENV_OVERRIDE = ["SP_AUTH_TOKEN", "SP_API_HTTP_HOST", "SP_API_HTTP_PORT"]
|
||||
|
||||
|
||||
def get_conf(section=None, use_env=True):
|
||||
"""Load the StorPool configuration from files and the environment."""
|
||||
config_path = pathlib.Path('/etc/storpool.conf')
|
||||
config_dir_path = pathlib.Path('/etc/storpool.conf.d')
|
||||
|
||||
def _read_with_unnamed(a_parser, file):
|
||||
with open(file) as stream:
|
||||
a_parser.read_string('[UNNAMED_SECTION]\n' + stream.read())
|
||||
|
||||
def _get_env_overrides():
|
||||
overrides = {}
|
||||
for override in ENV_OVERRIDE:
|
||||
if (value := os.environ.get(override)) is not None:
|
||||
overrides[override] = value
|
||||
return overrides
|
||||
|
||||
parser = configparser.ConfigParser(strict=False, allow_no_value=True)
|
||||
parser.optionxform = str
|
||||
|
||||
if not config_path.is_file():
|
||||
message = "File %(file)s does not exist or not a file"
|
||||
raise exception.BrickException(message, file=config_path)
|
||||
|
||||
_read_with_unnamed(parser, config_path)
|
||||
|
||||
if config_dir_path.is_dir():
|
||||
for path in sorted(config_dir_path.iterdir()):
|
||||
path_str = str(path)
|
||||
if path.is_file() \
|
||||
and path_str.endswith(".conf") \
|
||||
and not path_str.startswith("."):
|
||||
_read_with_unnamed(parser, path)
|
||||
|
||||
if section is None:
|
||||
section = platform.node()
|
||||
|
||||
conf = dict(STORPOOL_CONF_DEFAULTS)
|
||||
for sect in ['UNNAMED_SECTION', section]:
|
||||
if parser.has_section(sect):
|
||||
conf.update(dict(parser[sect]))
|
||||
|
||||
if use_env:
|
||||
conf.update(_get_env_overrides())
|
||||
|
||||
return conf
|
||||
|
||||
|
||||
def os_to_sp_volume_name(prefix, volume_id):
|
||||
return "{pfx}--volume-{id}".format(pfx=prefix, id=volume_id)
|
||||
|
||||
|
||||
def os_to_sp_snapshot_name(prefix, type, snapshot_id, more=None):
|
||||
return "{pfx}--{t}--{m}--snapshot-{id}".format(
|
||||
pfx=prefix,
|
||||
t=type,
|
||||
m="none" if more is None else more,
|
||||
id=snapshot_id,
|
||||
)
|
||||
|
||||
|
||||
class StorPoolAPIError(exception.BrickException):
|
||||
"""Borrowed from `storpool.spapi`"""
|
||||
message = _("HTTP: %(status)s, %(name)s: %(desc)s")
|
||||
|
||||
def __init__(self, status, json):
|
||||
self.status = status
|
||||
self.json = json
|
||||
self.name = json['error'].get('name', "<Missing error name>")
|
||||
self.desc = json['error'].get('descr', "<Missing error description>")
|
||||
self.transient = json['error'].get('transient', False)
|
||||
|
||||
super(StorPoolAPIError, self).__init__(
|
||||
status=status, name=self.name, desc=self.desc)
|
||||
|
||||
|
||||
class StorPoolAPI:
|
||||
"""A subset of the Python package `storpool` for a StorPool API client."""
|
||||
|
||||
def __init__(self, host, port, auth, timeout = 300, transient_retries = 5):
|
||||
self.url = f"{host}:{port}"
|
||||
self.auth_header = {'Authorization': f'Storpool v1:{auth}'}
|
||||
self.timeout = timeout
|
||||
self.transient_retries = transient_retries
|
||||
|
||||
def _api_call(self, method, path, body = None):
|
||||
retry = 0
|
||||
last_error = None
|
||||
while True:
|
||||
connection = None
|
||||
try:
|
||||
connection = http.client.HTTPConnection(
|
||||
self.url, timeout=self.timeout)
|
||||
if body:
|
||||
body = json.dumps(body)
|
||||
connection.request(method, path, body, self.auth_header)
|
||||
response = connection.getresponse()
|
||||
status, jres = response.status, json.load(response)
|
||||
|
||||
if status == http.client.OK and 'error' not in jres:
|
||||
return jres['data']
|
||||
|
||||
last_error = StorPoolAPIError(status, jres)
|
||||
if not jres['error'].get('transient', False):
|
||||
raise last_error
|
||||
except (socket.error, http.client.HTTPException) as err:
|
||||
if not (isinstance(err, http.client.HTTPException) or
|
||||
err.errno in (errno.ECONNREFUSED, errno.ECONNRESET)):
|
||||
raise
|
||||
last_error = err
|
||||
finally:
|
||||
if connection:
|
||||
connection.close()
|
||||
|
||||
if retry >= self.transient_retries:
|
||||
raise last_error
|
||||
|
||||
time.sleep(2**retry)
|
||||
retry += 1
|
||||
|
||||
def disks_list(self):
|
||||
return self._api_call('GET', '/ctrl/1.0/DisksList')
|
||||
|
||||
def volume_templates_list(self):
|
||||
return self._api_call('GET', '/ctrl/1.0/VolumeTemplatesList')
|
||||
|
||||
def volumes_reassign(self, data):
|
||||
self._api_call('POST', '/ctrl/1.0/MultiCluster/VolumesReassign', data)
|
||||
|
||||
def volumes_reassign_wait(self, data):
|
||||
self._api_call(
|
||||
'POST', '/ctrl/1.0/MultiCluster/VolumesReassignWait', data)
|
||||
|
||||
def volume(self, volume):
|
||||
return self._api_call(
|
||||
'GET', f'/ctrl/1.0/MultiCluster/Volume/{volume}')
|
||||
|
||||
def volume_create(self, data):
|
||||
self._api_call('POST', '/ctrl/1.0/MultiCluster/VolumeCreate', data)
|
||||
|
||||
def volume_get_info(self, volume):
|
||||
return self._api_call(
|
||||
'GET', f'/ctrl/1.0/MultiCluster/VolumeGetInfo/{volume}')
|
||||
|
||||
def volume_update(self, volume, data):
|
||||
self._api_call(
|
||||
'POST', f'/ctrl/1.0/MultiCluster/VolumeUpdate/{volume}', data)
|
||||
|
||||
def volume_revert(self, volume, data):
|
||||
self._api_call(
|
||||
'POST', f'/ctrl/1.0/MultiCluster/VolumeRevert/{volume}', data)
|
||||
|
||||
def volume_delete(self, volume):
|
||||
self._api_call('POST', f'/ctrl/1.0/MultiCluster/VolumeDelete/{volume}')
|
||||
|
||||
def volumes_list(self):
|
||||
return self._api_call('GET', '/ctrl/1.0/MultiCluster/VolumesList')
|
||||
|
||||
def snapshot_create(self, volume, data):
|
||||
self._api_call(
|
||||
'POST', f'/ctrl/1.0/MultiCluster/VolumeSnapshot/{volume}', data)
|
||||
|
||||
def snapshot_update(self, snapshot, data):
|
||||
self._api_call(
|
||||
'POST', f'/ctrl/1.0/SnapshotUpdate/{snapshot}', data)
|
||||
|
||||
def snapshot_delete(self, snapshot):
|
||||
self._api_call(
|
||||
'POST', f'/ctrl/1.0/MultiCluster/SnapshotDelete/{snapshot}')
|
@ -19,54 +19,27 @@ from unittest import mock
|
||||
|
||||
from os_brick import exception
|
||||
from os_brick.initiator.connectors import storpool as connector
|
||||
from os_brick.initiator import storpool_utils
|
||||
from os_brick.tests.initiator import test_connector
|
||||
from os_brick.tests.initiator import test_storpool_utils
|
||||
|
||||
|
||||
def volumeNameExt(vid):
|
||||
return 'os--volume--{id}'.format(id=vid)
|
||||
return 'os--volume-{id}'.format(id=vid)
|
||||
|
||||
|
||||
def faulty_api(req):
|
||||
faulty_api.real_fn(req)
|
||||
if faulty_api.fail_count > 0:
|
||||
faulty_api.fail_count -= 1
|
||||
raise MockApiError("busy",
|
||||
"'os--volume--sp-vol-1' is open at client 19")
|
||||
|
||||
|
||||
class MockApiError(Exception):
|
||||
def __init__(self, name, desc):
|
||||
super(MockApiError, self).__init__()
|
||||
self.name = name
|
||||
self.desc = desc
|
||||
|
||||
|
||||
class MockVolumeInfo(object):
|
||||
def __init__(self, global_id):
|
||||
self.globalId = global_id
|
||||
|
||||
|
||||
class MockStorPoolADB(object):
|
||||
def __init__(self, log):
|
||||
pass
|
||||
|
||||
def api(self):
|
||||
pass
|
||||
|
||||
def config(self):
|
||||
return {"SP_OURID": 1}
|
||||
|
||||
def volumeName(self, vid):
|
||||
return volumeNameExt(vid)
|
||||
|
||||
|
||||
spopenstack = mock.Mock()
|
||||
spopenstack.AttachDB = MockStorPoolADB
|
||||
connector.spopenstack = spopenstack
|
||||
|
||||
spapi = mock.Mock()
|
||||
spapi.ApiError = MockApiError
|
||||
connector.spapi = spapi
|
||||
raise storpool_utils.StorPoolAPIError(
|
||||
500,
|
||||
{
|
||||
'error': {
|
||||
'name': 'busy',
|
||||
'descr': "'os--volume--sp-vol-1' is open at client 19"
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
@ -96,7 +69,7 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
'access_mode': 'rw'
|
||||
}
|
||||
self.fakeDeviceInfo = {
|
||||
'path': '/dev/storpool/' + 'os--volume--' + 'sp-vol-1'
|
||||
'path': '/dev/storpool/' + 'os--volume-' + 'sp-vol-1'
|
||||
}
|
||||
self.fakeGlobalId = 'OneNiceGlobalId'
|
||||
self.api_calls_retry_max = 10
|
||||
@ -104,44 +77,52 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
self.fakeSize = 1024 * 1024 * 1024
|
||||
self.reassign_wait_data = {'reassign': [
|
||||
{'volume': volumeNameExt(self.fakeProp['volume']),
|
||||
'detach': [1], 'force': False}]}
|
||||
'detach': ['1'], 'force': False}]}
|
||||
|
||||
self.connector = connector.StorPoolConnector(
|
||||
None, execute=self.execute)
|
||||
self.adb = self.connector._attach
|
||||
|
||||
def test_raise_if_spopenstack_missing(self):
|
||||
with mock.patch.object(connector, 'spopenstack', None):
|
||||
self.assertRaises(exception.BrickException,
|
||||
connector.StorPoolConnector, "")
|
||||
with mock.patch(
|
||||
'os_brick.initiator.storpool_utils.get_conf'
|
||||
) as get_conf:
|
||||
get_conf.return_value = test_storpool_utils.SP_CONF
|
||||
self.connector = connector.StorPoolConnector(
|
||||
None, execute=self.execute)
|
||||
|
||||
def test_raise_if_sp_ourid_missing(self):
|
||||
with mock.patch.object(spopenstack.AttachDB, 'config', lambda x: {}):
|
||||
conf_no_sp_ourid = copy.deepcopy(test_storpool_utils.SP_CONF)
|
||||
del conf_no_sp_ourid['SP_OURID']
|
||||
|
||||
with mock.patch(
|
||||
'os_brick.initiator.storpool_utils.get_conf'
|
||||
) as get_conf:
|
||||
get_conf.return_value = conf_no_sp_ourid
|
||||
self.assertRaises(exception.BrickException,
|
||||
connector.StorPoolConnector, "")
|
||||
|
||||
def test_connect_volume(self):
|
||||
volume_name = volumeNameExt(self.fakeProp['volume'])
|
||||
api = mock.MagicMock(spec=['volumesReassignWait', 'volumeInfo'])
|
||||
api.volumesReassignWait = mock.MagicMock(spec=['__call__'])
|
||||
volume_info = MockVolumeInfo(self.fakeGlobalId)
|
||||
api.volumeInfo = mock.Mock(return_value=volume_info)
|
||||
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api:
|
||||
fake_api.return_value = api
|
||||
api = mock.MagicMock(spec=['volumes_reassign_wait', 'volume_get_info'])
|
||||
api.volumes_reassign_wait = mock.MagicMock(spec=['__call__'])
|
||||
api.volume_get_info = mock.Mock(
|
||||
return_value={"globalId": self.fakeGlobalId})
|
||||
reassign_wait_expected = {
|
||||
'reassign': [
|
||||
{
|
||||
'volume': 'os--volume-sp-vol-1',
|
||||
'rw': ['1']
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
with mock.patch.object(self.connector, attribute='_sp_api', new=api):
|
||||
conn = self.connector.connect_volume(self.fakeProp)
|
||||
self.assertIn('type', conn)
|
||||
self.assertIn('path', conn)
|
||||
self.assertEqual(conn['path'],
|
||||
'/dev/storpool-byid/' + self.fakeGlobalId)
|
||||
self.assertEqual(len(api.volumesReassignWait.mock_calls), 1)
|
||||
self.assertEqual(api.volumesReassignWait.mock_calls[0], mock.call(
|
||||
{'reassign': [{'volume': 'os--volume--sp-vol-1', 'rw': [1]}]}))
|
||||
self.assertEqual(len(api.volumeInfo.mock_calls), 1)
|
||||
self.assertEqual(api.volumeInfo.mock_calls[0],
|
||||
self.assertEqual(len(api.volumes_reassign_wait.mock_calls), 1)
|
||||
self.assertEqual(api.volumes_reassign_wait.mock_calls[0],
|
||||
mock.call(reassign_wait_expected))
|
||||
self.assertEqual(len(api.volume_get_info.mock_calls), 1)
|
||||
self.assertEqual(api.volume_get_info.mock_calls[0],
|
||||
mock.call(volume_name))
|
||||
|
||||
self.assertEqual(self.connector.get_search_path(), '/dev/storpool')
|
||||
@ -157,20 +138,16 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
if self.fakeConnection is None:
|
||||
self.test_connect_volume()
|
||||
|
||||
api = mock.MagicMock(spec=['volumesReassignWait'])
|
||||
api = mock.MagicMock(spec=['volumes_reassign_wait'])
|
||||
api.volumesReassignWait = mock.MagicMock(spec=['__call__'])
|
||||
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api:
|
||||
fake_api.return_value = api
|
||||
|
||||
with mock.patch.object(self.connector, attribute='_sp_api', new=api):
|
||||
self.connector.disconnect_volume(self.fakeProp,
|
||||
self.fakeDeviceInfo)
|
||||
self.assertEqual(api.volumesReassignWait.mock_calls[0],
|
||||
self.assertEqual(api.volumes_reassign_wait.mock_calls[0],
|
||||
(mock.call(self.reassign_wait_data)))
|
||||
|
||||
api.volumesReassignWait = mock.MagicMock(spec=['__call__'])
|
||||
api.volumes_reassign_wait = mock.MagicMock(spec=['__call__'])
|
||||
fake_device_info = copy.deepcopy(self.fakeDeviceInfo)
|
||||
fake_device_info["path"] = \
|
||||
"/dev/storpool-byid/" \
|
||||
@ -180,7 +157,7 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
"~byid-paths-map-to-"\
|
||||
"volumes-with-a-tilde-prefix"
|
||||
self.connector.disconnect_volume(self.fakeProp, fake_device_info)
|
||||
self.assertEqual(api.volumesReassignWait.mock_calls[0],
|
||||
self.assertEqual(api.volumes_reassign_wait.mock_calls[0],
|
||||
(mock.call(rwd)))
|
||||
|
||||
fake_device_info = copy.deepcopy(self.fakeDeviceInfo)
|
||||
@ -194,7 +171,7 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
"~byid-paths-map-to-"\
|
||||
"volumes-with-a-tilde-prefix"
|
||||
self.connector.disconnect_volume(fake_prop, fake_device_info)
|
||||
self.assertEqual(api.volumesReassignWait.mock_calls[0],
|
||||
self.assertEqual(api.volumes_reassign_wait.mock_calls[0],
|
||||
(mock.call(rwd)))
|
||||
|
||||
fake_device_info = copy.deepcopy(self.fakeDeviceInfo)
|
||||
@ -215,15 +192,11 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
|
||||
def test_connect_exceptions(self):
|
||||
"""Raise exceptions on missing connection information"""
|
||||
api = mock.MagicMock(spec=['volumesReassignWait', 'volumeInfo'])
|
||||
api.volumesReassignWait = mock.MagicMock(spec=['__call__'])
|
||||
api.volumeInfo = mock.MagicMock(spec=['__call__'])
|
||||
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api:
|
||||
fake_api.return_value = api
|
||||
api = mock.MagicMock(spec=['volumes_reassign_wait', 'volume_get_info'])
|
||||
api.volumes_reassign_wait = mock.MagicMock(spec=['__call__'])
|
||||
api.volume_get_info = mock.MagicMock(spec=['__call__'])
|
||||
|
||||
with mock.patch.object(self.connector, attribute='_sp_api', new=api):
|
||||
for key in ['volume', 'client_id', 'access_mode']:
|
||||
fake_prop = copy.deepcopy(self.fakeProp)
|
||||
del fake_prop[key]
|
||||
@ -244,9 +217,15 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
|
||||
def test_sp_ourid_exceptions(self):
|
||||
"""Raise exceptions on missing SP_OURID"""
|
||||
with mock.patch.object(self.connector._attach, 'config')\
|
||||
as fake_config:
|
||||
fake_config.return_value = {}
|
||||
conf_no_sp_ourid = copy.deepcopy(test_storpool_utils.SP_CONF)
|
||||
del conf_no_sp_ourid['SP_OURID']
|
||||
|
||||
with mock.patch(
|
||||
'os_brick.initiator.storpool_utils.get_conf'
|
||||
) as get_conf:
|
||||
conf_no_sp_ourid = copy.deepcopy(test_storpool_utils.SP_CONF)
|
||||
del conf_no_sp_ourid['SP_OURID']
|
||||
get_conf.return_value = conf_no_sp_ourid
|
||||
|
||||
self.assertRaises(exception.BrickException,
|
||||
self.connector.connect_volume, self.fakeProp)
|
||||
@ -257,17 +236,13 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
|
||||
def test_sp_api_exceptions(self):
|
||||
"""Handle SP API exceptions"""
|
||||
api = mock.MagicMock(spec=['volumesReassignWait', 'volumeInfo'])
|
||||
api.volumesReassignWait = mock.MagicMock(spec=['__call__'])
|
||||
api = mock.MagicMock(spec=['volumes_reassign_wait', 'volume_get_info'])
|
||||
api.volumes_reassign_wait = mock.MagicMock(spec=['__call__'])
|
||||
# The generic exception should bypass the SP API exception handling
|
||||
api.volumesReassignWait.side_effect = Exception()
|
||||
api.volumeInfo = mock.MagicMock(spec=['__call__'])
|
||||
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api:
|
||||
fake_api.return_value = api
|
||||
api.volumes_reassign_wait.side_effect = Exception()
|
||||
api.volume_get_info = mock.MagicMock(spec=['__call__'])
|
||||
|
||||
with mock.patch.object(self.connector, attribute='_sp_api', new=api):
|
||||
self.assertRaises(exception.BrickException,
|
||||
self.connector.connect_volume, self.fakeProp)
|
||||
|
||||
@ -275,14 +250,10 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
self.connector.disconnect_volume, self.fakeProp,
|
||||
self.fakeDeviceInfo)
|
||||
|
||||
api.volumesReassignWait.side_effect = ""
|
||||
api.volumeInfo = Exception()
|
||||
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api:
|
||||
fake_api.return_value = api
|
||||
api.volumes_reassign_wait.side_effect = ""
|
||||
api.volume_get_info = Exception()
|
||||
|
||||
with mock.patch.object(self.connector, attribute='_sp_api', new=api):
|
||||
self.assertRaises(exception.BrickException,
|
||||
self.connector.connect_volume, self.fakeProp)
|
||||
|
||||
@ -294,15 +265,11 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
def init_mock_api(retries):
|
||||
faulty_api.fail_count = retries
|
||||
faulty_api.real_fn = mock.MagicMock(spec=['__call__'])
|
||||
api.volumesReassignWait = faulty_api
|
||||
api.volumeInfo = mock.MagicMock(spec=['__call__'])
|
||||
api.volumes_reassign_wait = faulty_api
|
||||
api.volume_get_info = mock.MagicMock(spec=['__call__'])
|
||||
|
||||
init_mock_api(self.api_calls_retry_max - 1)
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api:
|
||||
fake_api.return_value = api
|
||||
|
||||
with mock.patch.object(self.connector, attribute='_sp_api', new=api):
|
||||
self.connector.disconnect_volume(self.fakeProp,
|
||||
self.fakeDeviceInfo)
|
||||
self.assertEqual(self.api_calls_retry_max,
|
||||
@ -311,10 +278,7 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
self.assertEqual(mock_call, mock.call(self.reassign_wait_data))
|
||||
|
||||
init_mock_api(self.api_calls_retry_max)
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api:
|
||||
fake_api.return_value = api
|
||||
with mock.patch.object(self.connector, attribute='_sp_api', new=api):
|
||||
rwd = copy.deepcopy(self.reassign_wait_data)
|
||||
|
||||
self.connector.disconnect_volume(self.fakeProp,
|
||||
@ -327,10 +291,7 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
self.assertEqual(faulty_api.real_fn.mock_calls[-1], mock.call(rwd))
|
||||
|
||||
init_mock_api(self.api_calls_retry_max + 1)
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api:
|
||||
fake_api.return_value = api
|
||||
with mock.patch.object(self.connector, attribute='_sp_api', new=api):
|
||||
rwd = copy.deepcopy(self.reassign_wait_data)
|
||||
|
||||
self.assertRaises(exception.BrickException,
|
||||
@ -351,34 +312,26 @@ class StorPoolConnectorTestCase(test_connector.ConnectorTestCase):
|
||||
|
||||
size_list = [self.fakeSize, self.fakeSize - 1, self.fakeSize - 2]
|
||||
|
||||
vdata = mock.MagicMock(spec=['size'])
|
||||
vdata.size = self.fakeSize
|
||||
vdata_list = [[vdata]]
|
||||
vdata_list = [[{'size': self.fakeSize}]]
|
||||
|
||||
def fake_volume_list(name):
|
||||
self.assertEqual(
|
||||
name,
|
||||
self.adb.volumeName(self.fakeProp['volume'])
|
||||
)
|
||||
self.assertEqual(name, volumeNameExt(self.fakeProp['volume']))
|
||||
return vdata_list.pop()
|
||||
|
||||
api = mock.MagicMock(spec=['volumeList'])
|
||||
api.volumeList = mock.MagicMock(spec=['__call__'])
|
||||
api = mock.MagicMock(spec=['volume'])
|
||||
api.volume = mock.MagicMock(spec=['__call__'])
|
||||
api.volume.side_effect = fake_volume_list
|
||||
|
||||
with mock.patch.object(
|
||||
self.adb, attribute='api', spec=['__call__']
|
||||
) as fake_api, mock.patch.object(
|
||||
self.connector, attribute='_sp_api', new=api
|
||||
), mock.patch.object(
|
||||
self, attribute='get_fake_size', spec=['__call__']
|
||||
) as fake_size, mock.patch('time.sleep') as fake_sleep:
|
||||
fake_api.return_value = api
|
||||
api.volumeList.side_effect = fake_volume_list
|
||||
|
||||
fake_size.side_effect = size_list.pop
|
||||
|
||||
newSize = self.connector.extend_volume(self.fakeProp)
|
||||
|
||||
self.assertEqual(fake_api.call_count, 1)
|
||||
self.assertEqual(api.volumeList.call_count, 1)
|
||||
self.assertEqual(api.volume.call_count, 1)
|
||||
self.assertListEqual(vdata_list, [])
|
||||
|
||||
self.assertEqual(fake_size.call_count, 3)
|
||||
|
231
os_brick/tests/initiator/test_storpool_utils.py
Normal file
231
os_brick/tests/initiator/test_storpool_utils.py
Normal file
@ -0,0 +1,231 @@
|
||||
# Copyright (c) 2015 - 2024 StorPool
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
from unittest import mock
|
||||
|
||||
|
||||
from os_brick import exception
|
||||
from os_brick.initiator import storpool_utils
|
||||
from os_brick.tests import base
|
||||
|
||||
STORPOOL_CONF_INI_NO_HOSTNAME = """\
|
||||
SP_API_HTTP_HOST=127.0.0.1
|
||||
SP_API_HTTP_PORT=81
|
||||
SP_AUTH_TOKEN=1234567890123456789
|
||||
|
||||
[another-node]
|
||||
SP_OURID=2
|
||||
"""
|
||||
|
||||
STORPOOL_CONF_INI = STORPOOL_CONF_INI_NO_HOSTNAME + """\
|
||||
[this-node]
|
||||
SP_OURID=1
|
||||
"""
|
||||
|
||||
STORPOOL_CONF_INI_SELECTOR = "full"
|
||||
|
||||
ANOTHER_CONF_INI = """\
|
||||
SP_API_HTTP_HOST=127.0.100.1
|
||||
SP_API_HTTP_PORT=8080
|
||||
"""
|
||||
|
||||
SP_CONF = {
|
||||
'SP_API_HTTP_HOST': '127.0.100.1',
|
||||
'SP_API_HTTP_PORT': '8080',
|
||||
'SP_AUTH_TOKEN': '1234567890123456789',
|
||||
'SP_OURID': '1'
|
||||
}
|
||||
|
||||
|
||||
def faulty_api(req):
|
||||
faulty_api.real_fn(req)
|
||||
if faulty_api.fail_count > 0:
|
||||
faulty_api.fail_count -= 1
|
||||
raise storpool_utils.StorPoolAPIError(
|
||||
500,
|
||||
{
|
||||
'error': {
|
||||
'name': 'busy',
|
||||
'descr': "'os--volume--sp-vol-1' is open at client 19"
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
def _fake_open(path):
|
||||
data = ""
|
||||
if path.name == '/etc/storpool.conf':
|
||||
if STORPOOL_CONF_INI_SELECTOR == 'full':
|
||||
data = STORPOOL_CONF_INI
|
||||
if STORPOOL_CONF_INI_SELECTOR == 'no-hostname':
|
||||
data = STORPOOL_CONF_INI_NO_HOSTNAME
|
||||
elif path.name == '/etc/storpool.conf.d/another.conf':
|
||||
data = ANOTHER_CONF_INI
|
||||
else:
|
||||
raise Exception(f"Called open with an unexpected path: {path}")
|
||||
|
||||
open_mock = mock.Mock()
|
||||
open_mock.read = lambda: data
|
||||
ctx_mock = mock.Mock()
|
||||
ctx_mock.__enter__ = mock.Mock(return_value=open_mock)
|
||||
ctx_mock.__exit__ = mock.Mock()
|
||||
return ctx_mock
|
||||
|
||||
|
||||
def _fake_node():
|
||||
return 'this-node'
|
||||
|
||||
|
||||
class FakePath:
    """Minimal in-memory stand-in for ``pathlib.Path``.

    Only the surface used by storpool_utils.get_conf() is provided:
    is_file(), is_dir(), iterdir() and str().
    """

    def __init__(self, name, exists, is_file, dir_contents=None):
        # name: full path string returned by __str__ (and read as .name)
        self.name = name
        # exists: whether the fake entry is present on the fake filesystem
        self.exists = exists
        # is_a_file: True for a file entry, False for a directory entry
        self.is_a_file = is_file
        # dir_contents: child FakePath list, or None for non-directories
        self.dir_contents = dir_contents

    def is_file(self):
        """True only for an existing file entry."""
        return self.exists and self.is_a_file

    def is_dir(self):
        """True only for an existing directory entry."""
        return self.exists and not self.is_a_file

    def iterdir(self):
        """Yield the directory's children; raise for non-directories."""
        if self.dir_contents is not None:
            return self.dir_contents
        raise Exception(
            f"Called iterdir() on a non-directory: {self.name}")

    def __str__(self):
        return self.name
|
||||
|
||||
|
||||
@mock.patch('builtins.open', _fake_open)
@mock.patch('platform.node', _fake_node)
class StorPoolConfTestCase(base.TestCase):
    """Tests for storpool_utils.get_conf() against a faked filesystem."""

    def setUp(self):
        super(StorPoolConfTestCase, self).setUp()

        # Stand-in for pathlib.Path; each call returns the next FakePath.
        self.mock_path = mock.Mock()
        # Default layout: the main config file plus an empty drop-in dir.
        self.fs_tree = [
            FakePath('/etc/storpool.conf', True, True),
            FakePath('/etc/storpool.conf.d', True, False, [])
        ]

    def test_subconf_overrides_main(self):
        """Values from conf.d drop-ins replace those in storpool.conf."""
        self.fs_tree[1] = FakePath('/etc/storpool.conf.d', True, False, [
            FakePath('/etc/storpool.conf.d/another.conf', True, True)
        ])
        self._fs_init(self.fs_tree, 'full')

        with mock.patch('pathlib.Path', self.mock_path):
            parsed = storpool_utils.get_conf()

        self.assertEqual(SP_CONF, parsed)

    def test_only_storpool_conf(self):
        """Without drop-ins, the main file's host and port are kept."""
        self._fs_init(self.fs_tree, 'full')

        expected = copy.deepcopy(SP_CONF)
        expected['SP_API_HTTP_HOST'] = '127.0.0.1'
        expected['SP_API_HTTP_PORT'] = '81'

        with mock.patch('pathlib.Path', self.mock_path):
            parsed = storpool_utils.get_conf()

        self.assertEqual(expected, parsed)

    def test_env_overrides_main(self):
        """Environment variables take precedence over the config files."""
        self._fs_init(self.fs_tree, 'full')

        env_overrides = {
            'SP_API_HTTP_HOST': '192.168.0.10',
            'SP_API_HTTP_PORT': '8123'
        }
        expected = copy.deepcopy(SP_CONF)
        expected.update(env_overrides)

        with (mock.patch('pathlib.Path', self.mock_path),
              mock.patch.dict(os.environ, env_overrides)):
            parsed = storpool_utils.get_conf()

        self.assertEqual(expected, parsed)

    def test_raise_if_no_storpool_conf(self):
        """A missing /etc/storpool.conf is a hard error."""
        self.fs_tree[0] = FakePath('/etc/storpool.conf', False, True)
        self._fs_init(self.fs_tree, 'full')

        with mock.patch('pathlib.Path', self.mock_path):
            self.assertRaises(exception.BrickException,
                              storpool_utils.get_conf)

    def _fs_init(self, fs, storpool_conf_type):
        """Select the storpool.conf variant and arm the Path() mock."""
        global STORPOOL_CONF_INI_SELECTOR
        STORPOOL_CONF_INI_SELECTOR = storpool_conf_type
        self.mock_path.side_effect = fs
|
||||
|
||||
|
||||
class StorPoolAPITestCase(base.TestCase):
    """Tests for storpool_utils.StorPoolAPI over a mocked HTTP connection."""

    def setUp(self):
        super(StorPoolAPITestCase, self).setUp()
        # Host/port/token are arbitrary; no real connection is ever made.
        self.api = storpool_utils.StorPoolAPI(
            '127.0.0.1', '81', '1234567890123456789')

    def test_api_ok(self):
        """A 200 reply with valid JSON yields the decoded 'data' payload."""
        with mock.patch('http.client.HTTPConnection') as connection_mock:
            response = mock.Mock()
            response.status = 200
            response.read = lambda: '{ "data": [{ "name": "test-volume" }] }'

            conn = connection_mock.return_value
            conn.getresponse = mock.Mock(return_value=response)

            self.assertEqual(self.api.volumes_list(),
                             [{'name': 'test-volume'}])

    def test_api_exceptions(self):
        """Malformed or error replies surface as the matching exceptions."""
        with mock.patch('http.client.HTTPConnection') as connection_mock:
            response = mock.Mock()
            response.status = 200

            conn = connection_mock.return_value
            conn.getresponse = mock.Mock(return_value=response)

            # Valid JSON, but the expected 'data' key is absent.
            response.read = lambda: '{}'
            self.assertRaises(KeyError, self.api.volumes_list)

            # Not JSON at all.
            response.read = lambda: '{/}'
            self.assertRaises(json.JSONDecodeError, self.api.volumes_list)

            # A well-formed StorPool error document.
            response.read = lambda: '{ "error": { "transient": true } }'
            self.assertRaises(storpool_utils.StorPoolAPIError,
                              self.api.volumes_list)

    def test_api_handle_transient(self):
        """Transient API errors are retried until a good reply arrives."""
        with mock.patch('http.client.HTTPConnection') as connection_mock:
            failed = mock.Mock()
            failed.status = 500
            failed.read = lambda: '{ "error": { "transient": true } }'

            succeeded = mock.Mock()
            succeeded.status = 200
            succeeded.read = lambda: '{ "data": [{ "name": "test-volume" }] }'

            conn = connection_mock.return_value
            # Two transient failures, then success; the call must recover.
            conn.getresponse = mock.Mock(
                side_effect=[failed, failed, succeeded])

            self.assertEqual(self.api.volumes_list(),
                             [{'name': 'test-volume'}])
|
@ -0,0 +1,8 @@
|
||||
---
other:
  - |
    A new in-tree implementation for communicating with the StorPool API
    and reading StorPool configuration files.

    The StorPool backend no longer requires the OpenStack nodes to have
    the Python packages `storpool` and `storpool.spopenstack` installed.
|
Loading…
x
Reference in New Issue
Block a user