Add Hitachi HSP driver
This patch adds the Hitachi HSP driver.

Co-Authored-By: Marcus V R Nascimento <marcusvrn@gmail.com>
DocImpact
Implements: blueprint hsp-driver
Change-Id: Ie3028cbff56873721dc51ff4d7fccf7fb7e99611
parent 930a3fb921
commit 2a7d480d01
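For reference, a minimal manila.conf backend stanza for this driver could look like the sketch below. The option names come from hitachi_hsp_opts in this patch and the sample values mirror the test defaults; the section name hsp1 and the enabled_share_backends/enabled_share_protocols lines are assumptions about the deployment, and driver_handles_share_servers = False matches the mode the driver initializes itself with here.

    [DEFAULT]
    enabled_share_backends = hsp1
    enabled_share_protocols = NFS

    [hsp1]
    share_backend_name = HSP
    share_driver = manila.share.drivers.hitachi.hsp.driver.HitachiHSPDriver
    driver_handles_share_servers = False
    hitachi_hsp_host = 172.24.47.190
    hitachi_hsp_username = hsp_user
    hitachi_hsp_password = hsp_password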
@@ -53,6 +53,8 @@ Mapping of share drivers and share features support
+----------------------------------------+-----------------------+-----------------------+--------------+--------------+------------------------+----------------------------+--------------------------+
| Hitachi HNAS | L | L | L | M | L | L | \- |
+----------------------------------------+-----------------------+-----------------------+--------------+--------------+------------------------+----------------------------+--------------------------+
| Hitachi HSP | N | N | N | N | \- | \- | \- |
+----------------------------------------+-----------------------+-----------------------+--------------+--------------+------------------------+----------------------------+--------------------------+
| HPE 3PAR | K | \- | \- | \- | K | K | \- |
+----------------------------------------+-----------------------+-----------------------+--------------+--------------+------------------------+----------------------------+--------------------------+
| Huawei | K | L | L | L | K | M | \- |
@@ -100,6 +102,8 @@ Mapping of share drivers and share access rules support
+----------------------------------------+--------------+----------------+------------+--------------+--------------+----------------+------------+------------+
| Hitachi HNAS | NFS (L) | \- | \- | \- | NFS (L) | \- | \- | \- |
+----------------------------------------+--------------+----------------+------------+--------------+--------------+----------------+------------+------------+
| Hitachi HSP | NFS (N) | \- | \- | \- | NFS (N) | \- | \- | \- |
+----------------------------------------+--------------+----------------+------------+--------------+--------------+----------------+------------+------------+
| HPE 3PAR | NFS,CIFS (K) | CIFS (K) | \- | \- | \- | \- | \- | \- |
+----------------------------------------+--------------+----------------+------------+--------------+--------------+----------------+------------+------------+
| Huawei | NFS (K) | NFS (M),CIFS (K) | \- | \- | NFS (K) | NFS (M),CIFS (K) | \- | \- |
@@ -145,6 +149,8 @@ Mapping of share drivers and security services support
+----------------------------------------+------------------+-----------------+------------------+
| Hitachi HNAS | \- | \- | \- |
+----------------------------------------+------------------+-----------------+------------------+
| Hitachi HSP | \- | \- | \- |
+----------------------------------------+------------------+-----------------+------------------+
| HPE 3PAR | \- | \- | \- |
+----------------------------------------+------------------+-----------------+------------------+
| Huawei | M | M | \- |
@@ -190,6 +196,8 @@ Mapping of share drivers and common capabilities
+----------------------------------------+-----------+------------+--------+-------------+-------------------+--------------------+-----+
| Hitachi HNAS | \- | L | \- | \- | L | \- | \- |
+----------------------------------------+-----------+------------+--------+-------------+-------------------+--------------------+-----+
| Hitachi HSP | \- | N | \- | \- | N | \- | \- |
+----------------------------------------+-----------+------------+--------+-------------+-------------------+--------------------+-----+
| HPE 3PAR | L | K | L | \- | L | L | \- |
+----------------------------------------+-----------+------------+--------+-------------+-------------------+--------------------+-----+
| Huawei | M | K | L | L | L | L | M |
@@ -794,3 +794,16 @@ class StorageCommunicationException(ShareBackendException):

class EvaluatorParseException(ManilaException):
    message = _("Error during evaluator parsing: %(reason)s")


# Hitachi Scaleout Platform driver
class HSPBackendException(ShareBackendException):
    message = _("HSP Backend Exception: %(msg)s")


class HSPTimeoutException(ShareBackendException):
    message = _("HSP Timeout Exception: %(msg)s")


class HSPItemNotFoundException(ShareBackendException):
    message = _("HSP Item Not Found Exception: %(msg)s")
@@ -61,6 +61,7 @@ import manila.share.drivers.glusterfs.layout_directory
import manila.share.drivers.glusterfs.layout_volume
import manila.share.drivers.hdfs.hdfs_native
import manila.share.drivers.hitachi.hds_hnas
import manila.share.drivers.hitachi.hsp.driver
import manila.share.drivers.hpe.hpe_3par_driver
import manila.share.drivers.huawei.huawei_nas
import manila.share.drivers.ibm.gpfs
@@ -127,6 +128,7 @@ _global_opt_lists = [
    manila.share.drivers.glusterfs.layout_volume.glusterfs_volume_mapped_opts,
    manila.share.drivers.hdfs.hdfs_native.hdfs_native_share_opts,
    manila.share.drivers.hitachi.hds_hnas.hds_hnas_opts,
    manila.share.drivers.hitachi.hsp.driver.hitachi_hsp_opts,
    manila.share.drivers.hpe.hpe_3par_driver.HPE3PAR_OPTS,
    manila.share.drivers.huawei.huawei_nas.huawei_opts,
    manila.share.drivers.ibm.gpfs.gpfs_share_opts,
manila/share/drivers/hitachi/hsp/__init__.py (new file, 0 lines)
manila/share/drivers/hitachi/hsp/driver.py (new file, 321 lines)
@@ -0,0 +1,321 @@
# Copyright (c) 2016 Hitachi Data Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_config import cfg
from oslo_log import log
from oslo_utils import excutils
from oslo_utils import units

from manila.common import constants
from manila import exception
from manila.i18n import _
from manila.i18n import _LE
from manila.i18n import _LI
from manila.share import driver
from manila.share.drivers.hitachi.hsp import rest

LOG = log.getLogger(__name__)

hitachi_hsp_opts = [
    cfg.StrOpt('hitachi_hsp_host',
               required=True,
               help="HSP management host for communication between Manila "
                    "controller and HSP."),
    cfg.StrOpt('hitachi_hsp_username',
               required=True,
               help="HSP username to perform tasks such as create filesystems"
                    " and shares."),
    cfg.StrOpt('hitachi_hsp_password',
               required=True,
               secret=True,
               help="HSP password for the username provided."),
]


class HitachiHSPDriver(driver.ShareDriver):
    """Manila HSP Driver implementation.

    1.0.0 - Initial Version.
    """

    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(
            [False], *args, config_opts=[hitachi_hsp_opts], **kwargs)

        self.private_storage = kwargs.get('private_storage')

        self.backend_name = self.configuration.safe_get('share_backend_name')
        self.hsp_host = self.configuration.safe_get('hitachi_hsp_host')

        self.hsp = rest.HSPRestBackend(
            self.hsp_host,
            self.configuration.safe_get('hitachi_hsp_username'),
            self.configuration.safe_get('hitachi_hsp_password')
        )

    def _update_share_stats(self, data=None):
        LOG.debug("Updating Backend Capability Information - Hitachi HSP.")

        reserved = self.configuration.safe_get('reserved_share_percentage')
        max_over_subscription_ratio = self.configuration.safe_get(
            'max_over_subscription_ratio')
        hsp_cluster = self.hsp.get_cluster()

        total_space = hsp_cluster['properties']['total-storage-capacity']
        free_space = hsp_cluster['properties']['total-storage-available']

        data = {
            'share_backend_name': self.backend_name,
            'vendor_name': 'Hitachi',
            'driver_version': '1.0.0',
            'storage_protocol': 'NFS',
            'pools': [{
                'reserved_percentage': reserved,
                'pool_name': 'HSP',
                'thin_provisioning': True,
                'total_capacity_gb': total_space / units.Gi,
                'free_capacity_gb': free_space / units.Gi,
                'max_over_subscription_ratio': max_over_subscription_ratio,
                'qos': False,
                'dedupe': False,
                'compression': False,
            }],
        }

        LOG.info(_LI("Hitachi HSP Capabilities: %(data)s."),
                 {'data': data})
        super(HitachiHSPDriver, self)._update_share_stats(data)

    def create_share(self, context, share, share_server=None):
        LOG.debug("Creating share in HSP: %(shr)s", {'shr': share['id']})

        if share['share_proto'].lower() != 'nfs':
            msg = _("Only NFS protocol is currently supported.")
            raise exception.InvalidShare(reason=msg)

        self.hsp.add_file_system(share['id'], share['size'] * units.Gi)
        filesystem_id = self.hsp.get_file_system(share['id'])['id']

        try:
            self.hsp.add_share(share['id'], filesystem_id)
        except exception.HSPBackendException:
            with excutils.save_and_reraise_exception():
                self.hsp.delete_file_system(filesystem_id)
                msg = _LE("Could not create share %s on HSP.")
                LOG.exception(msg, share['id'])

        uri = self.hsp_host + ':/' + share['id']

        LOG.debug("Share created successfully on path: %(uri)s.",
                  {'uri': uri})
        return [{
            "path": uri,
            "metadata": {},
            "is_admin_only": False,
        }]

    def delete_share(self, context, share, share_server=None):
        LOG.debug("Deleting share in HSP: %(shr)s.", {'shr': share['id']})

        filesystem_id = hsp_share_id = None

        try:
            filesystem_id = self.hsp.get_file_system(share['id'])['id']
            hsp_share_id = self.hsp.get_share(filesystem_id)['id']
        except exception.HSPItemNotFoundException:
            LOG.info(_LI("Share %(shr)s already removed from backend."),
                     {'shr': share['id']})

        if hsp_share_id:
            self.hsp.delete_share(hsp_share_id)

        if filesystem_id:
            self.hsp.delete_file_system(filesystem_id)

        LOG.debug("Export and share successfully deleted: %(shr)s.",
                  {'shr': share['id']})

    def update_access(self, context, share, access_rules, add_rules,
                      delete_rules, share_server=None):

        LOG.debug("Updating access rules for share: %(shr)s.",
                  {'shr': share['id']})

        try:
            filesystem_id = self.hsp.get_file_system(share['id'])['id']
            hsp_share_id = self.hsp.get_share(filesystem_id)['id']
        except exception.HSPItemNotFoundException:
            raise exception.ShareResourceNotFound(share_id=share['id'])

        if not (add_rules or delete_rules):
            # Recovery mode
            current_rules = self.hsp.get_access_rules(hsp_share_id)

            # Indexing the rules for faster searching
            hsp_rules_dict = {
                rule['host-specification']: rule['read-write']
                for rule in current_rules
            }

            manila_rules_dict = {}

            for rule in access_rules:
                if rule['access_type'].lower() != 'ip':
                    msg = _("Only IP access type currently supported.")
                    raise exception.InvalidShareAccess(reason=msg)

                access_to = rule['access_to']
                is_rw = rule['access_level'] == constants.ACCESS_LEVEL_RW
                manila_rules_dict[access_to] = is_rw

            # Remove the rules that exist on HSP but not on manila
            remove_rules = self._get_complement(hsp_rules_dict,
                                                manila_rules_dict)

            # Add the rules that exist on manila but not on HSP
            add_rules = self._get_complement(manila_rules_dict, hsp_rules_dict)

            for rule in remove_rules:
                self.hsp.delete_access_rule(hsp_share_id, rule[0])

            for rule in add_rules:
                self.hsp.add_access_rule(hsp_share_id, rule[0], rule[1])
        else:
            for rule in delete_rules:
                if rule['access_type'].lower() != 'ip':
                    continue
                self.hsp.delete_access_rule(hsp_share_id, rule['access_to'])

            for rule in add_rules:
                if rule['access_type'].lower() != 'ip':
                    msg = _("Only IP access type currently supported.")
                    raise exception.InvalidShareAccess(reason=msg)

                self.hsp.add_access_rule(
                    hsp_share_id, rule['access_to'],
                    (rule['access_level'] == constants.ACCESS_LEVEL_RW))

        LOG.debug("Successfully updated share %(shr)s rules.",
                  {'shr': share['id']})

    def _get_complement(self, rules_a, rules_b):
        """Returns the rules of list A that are not on list B"""
        complement = []
        for rule, is_rw in rules_a.items():
            if rule not in rules_b or rules_b[rule] != is_rw:
                complement.append((rule, is_rw))

        return complement

    def extend_share(self, share, new_size, share_server=None):
        LOG.debug("Extending share in HSP: %(shr_id)s.",
                  {'shr_id': share['id']})

        old_size = share['size']
        hsp_cluster = self.hsp.get_cluster()
        free_space = hsp_cluster['properties']['total-storage-available']
        free_space = free_space / units.Gi

        if (new_size - old_size) < free_space:
            filesystem_id = self.hsp.get_file_system(share['id'])['id']
            self.hsp.resize_file_system(filesystem_id, new_size * units.Gi)
        else:
            msg = (_("Share %s cannot be extended due to insufficient space.")
                   % share['id'])
            raise exception.HSPBackendException(msg=msg)

        LOG.info(_LI("Share %(shr_id)s successfully extended to "
                     "%(shr_size)sG."),
                 {'shr_id': share['id'],
                  'shr_size': new_size})

    def shrink_share(self, share, new_size, share_server=None):
        LOG.debug("Shrinking share in HSP: %(shr_id)s.",
                  {'shr_id': share['id']})

        file_system = self.hsp.get_file_system(share['id'])
        usage = file_system['properties']['used-capacity'] / units.Gi

        LOG.debug("Usage for share %(shr_id)s in HSP: %(usage)sG.",
                  {'shr_id': share['id'], 'usage': usage})

        if new_size > usage:
            self.hsp.resize_file_system(file_system['id'], new_size * units.Gi)
        else:
            raise exception.ShareShrinkingPossibleDataLoss(
                share_id=share['id'])

        LOG.info(_LI("Share %(shr_id)s successfully shrunk to "
                     "%(shr_size)sG."),
                 {'shr_id': share['id'],
                  'shr_size': new_size})

    def manage_existing(self, share, driver_options):
        LOG.debug("Managing share in HSP: %(shr_id)s.",
                  {'shr_id': share['id']})

        ip, share_name = share['export_locations'][0]['path'].split(':')

        try:
            hsp_share = self.hsp.get_share(name=share_name.strip('/'))
        except exception.HSPItemNotFoundException:
            msg = _("The share %s trying to be managed was not found on "
                    "backend.") % share['id']
            raise exception.ManageInvalidShare(reason=msg)

        self.hsp.rename_file_system(hsp_share['properties']['file-system-id'],
                                    share['id'])

        original_name = hsp_share['properties']['file-system-name']
        private_storage_content = {
            'old_name': original_name,
            'new_name': share['id'],
        }
        self.private_storage.update(share['id'], private_storage_content)

        LOG.debug("Filesystem %(original_name)s was renamed to %(name)s.",
                  {'original_name': original_name,
                   'name': share['id']})

        file_system = self.hsp.get_file_system(share['id'])

        LOG.info(_LI("Share %(shr_path)s was successfully managed with ID "
                     "%(shr_id)s."),
                 {'shr_path': share['export_locations'][0]['path'],
                  'shr_id': share['id']})

        export_locations = [{
            "path": share['export_locations'][0]['path'],
            "metadata": {},
            "is_admin_only": False,
        }]

        return {'size': file_system['properties']['quota'] / units.Gi,
                'export_locations': export_locations}

    def unmanage(self, share):
        original_name = self.private_storage.get(share['id'], 'old_name')

        LOG.debug("Filesystem %(name)s that was originally named "
                  "%(original_name)s will no longer be managed.",
                  {'original_name': original_name,
                   'name': share['id']})

        self.private_storage.delete(share['id'])

        LOG.info(_LI("The share with current path %(shr_path)s and ID "
                     "%(shr_id)s is no longer being managed."),
                 {'shr_path': share['export_locations'][0]['path'],
                  'shr_id': share['id']})
manila/share/drivers/hitachi/hsp/rest.py (new file, 207 lines)
@@ -0,0 +1,207 @@
# Copyright (c) 2016 Hitachi Data Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import requests

from manila import exception
from manila.i18n import _
from manila import utils


# Suppress the Insecure request warnings
requests.packages.urllib3.disable_warnings()


class HSPRestBackend(object):
    def __init__(self, hsp_host, hsp_username, hsp_password):
        self.host = hsp_host
        self.username = hsp_username
        self.password = hsp_password

    def _send_post(self, url, payload=None):
        resp = requests.post(url, auth=(self.username, self.password),
                             data=payload, verify=False)

        if resp.status_code == 202:
            self._wait_job_status(resp.headers['location'], 'COMPLETE')
        else:
            msg = (_("HSP API post failed: %s.") %
                   resp.json()['messages'][0]['message'])
            raise exception.HSPBackendException(msg=msg)

    def _send_get(self, url, payload=None):
        resp = requests.get(url, auth=(self.username, self.password),
                            data=payload, verify=False)

        if resp.status_code == 200:
            if resp.content == 'null':
                return None
            else:
                return resp.json()
        else:
            msg = (_("HSP API get failed: %s.") %
                   resp.json()['messages'][0]['message'])
            raise exception.HSPBackendException(msg=msg)

    def _send_delete(self, url, payload=None):
        resp = requests.delete(url, auth=(self.username, self.password),
                               data=payload, verify=False)

        if resp.status_code == 202:
            self._wait_job_status(resp.headers['location'], 'COMPLETE')
        else:
            msg = (_("HSP API delete failed: %s.") %
                   resp.json()['messages'][0]['message'])
            raise exception.HSPBackendException(msg=msg)

    def add_file_system(self, name, quota):
        url = "https://%s/hspapi/file-systems/" % self.host
        payload = {
            'quota': quota,
            'auto-access': False,
            'enabled': True,
            'description': '',
            'record-access-time': True,
            'tags': '',
            # Usage percentage in which a warning will be shown
            'space-hwm': 90,
            # Usage percentage in which the warning will be cleared
            'space-lwm': 70,
            'name': name,
        }
        self._send_post(url, payload=json.dumps(payload))

    def get_file_system(self, name):
        url = ("https://%s/hspapi/file-systems/list?name=%s" %
               (self.host, name))

        filesystems = self._send_get(url)

        try:
            return filesystems['list'][0]
        except (TypeError, KeyError, IndexError):
            msg = _("Filesystem does not exist or is not available.")
            raise exception.HSPItemNotFoundException(msg=msg)

    def delete_file_system(self, filesystem_id):
        url = "https://%s/hspapi/file-systems/%s" % (self.host, filesystem_id)
        self._send_delete(url)

    def resize_file_system(self, filesystem_id, new_size):
        url = "https://%s/hspapi/file-systems/%s" % (self.host, filesystem_id)
        payload = {'quota': new_size}

        self._send_post(url, payload=json.dumps(payload))

    def rename_file_system(self, filesystem_id, new_name):
        url = "https://%s/hspapi/file-systems/%s" % (self.host, filesystem_id)
        payload = {'name': new_name}

        self._send_post(url, payload=json.dumps(payload))

    def add_share(self, name, filesystem_id):
        url = "https://%s/hspapi/shares/" % self.host
        payload = {
            'description': '',
            'type': 'NFS',
            'enabled': True,
            'tags': '',
            'name': name,
            'file-system-id': filesystem_id,
        }

        self._send_post(url, payload=json.dumps(payload))

    def get_share(self, fs_id=None, name=None):
        if fs_id is not None:
            url = ('https://%s/hspapi/shares/list?file-system-id=%s' %
                   (self.host, fs_id))
        elif name is not None:
            url = ('https://%s/hspapi/shares/list?name=%s' %
                   (self.host, name))
        share = self._send_get(url)

        try:
            return share['list'][0]
        except (TypeError, KeyError, IndexError):
            msg = _("Share %s does not exist or is not available.")

            if fs_id is not None:
                args = "for filesystem %s" % fs_id
            else:
                args = name

            raise exception.HSPItemNotFoundException(msg=msg % args)

    def delete_share(self, share_id):
        url = "https://%s/hspapi/shares/%s" % (self.host, share_id)
        self._send_delete(url)

    def add_access_rule(self, share_id, host_to, read_write):
        url = "https://%s/hspapi/shares/%s/" % (self.host, share_id)
        payload = {
            "action": "add-access-rule",
            "name": share_id + host_to,
            "host-specification": host_to,
            "read-write": read_write,
        }

        self._send_post(url, payload=json.dumps(payload))

    def delete_access_rule(self, share_id, host_to):
        url = "https://%s/hspapi/shares/%s/" % (self.host, share_id)
        payload = {
            "action": "delete-access-rule",
            "name": share_id + host_to,
        }

        self._send_post(url, payload=json.dumps(payload))

    def get_access_rules(self, share_id):
        url = ("https://%s/hspapi/shares/%s/access-rules" %
               (self.host, share_id))
        rules = self._send_get(url)

        try:
            rules = rules['list']
        except (TypeError, KeyError, IndexError):
            rules = []
        return rules

    def get_cluster(self):
        url = "https://%s/hspapi/clusters/list" % self.host
        clusters = self._send_get(url)

        try:
            return clusters['list'][0]
        except (TypeError, KeyError, IndexError):
            msg = _("No cluster was found on HSP.")
            raise exception.HSPBackendException(msg=msg)

    @utils.retry(exception.HSPTimeoutException, retries=10, wait_random=True)
    def _wait_job_status(self, job_url, target_status):
        resp_json = self._send_get(job_url)

        status = resp_json['properties']['completion-status']

        if status == 'ERROR':
            msg = _("HSP job %s failed.")
            args = resp_json['id']
            raise exception.HSPBackendException(msg=msg % args)
        elif status != target_status:
            msg = _("Timeout while waiting for job %s to complete.")
            args = resp_json['id']
            raise exception.HSPTimeoutException(msg=msg % args)
@@ -49,6 +49,10 @@ def set_defaults(conf):
    _safe_set_of_opts(conf, 'zfs_share_helpers', 'NFS=foo.bar.Helper')
    _safe_set_of_opts(conf, 'zfs_replica_snapshot_prefix', 'foo_prefix_')

    _safe_set_of_opts(conf, 'hitachi_hsp_host', '172.24.47.190')
    _safe_set_of_opts(conf, 'hitachi_hsp_username', 'hsp_user')
    _safe_set_of_opts(conf, 'hitachi_hsp_password', 'hsp_password')


def _safe_set_of_opts(conf, *args, **kwargs):
    try:
manila/tests/share/drivers/hitachi/hsp/__init__.py (new file, 0 lines)
manila/tests/share/drivers/hitachi/hsp/fakes.py (new file, 85 lines)
@@ -0,0 +1,85 @@
# Copyright (c) 2016 Hitachi Data Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

file_system = {
    'id': '33689245-1806-45d0-8507-0700b5f89750',
    'properties': {
        'cluster-id': '85d5b9e2-27f3-11e6-8b50-005056a75f66',
        'quota': 107374182400,
        'name': '07c966f9-fea2-4e12-ab72-97cb3c529bb5',
        'used-capacity': 53687091200,
        'free-capacity': 53687091200
    },
}

share = {
    'id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'name': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'properties': {
        'file-system-id': '33689245-1806-45d0-8507-0700b5f89750',
        'file-system-name': 'fake_name',
    },
}

invalid_share = {
    'id': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'name': 'aa4a7710-f326-41fb-ad18-b4ad587fc87a',
    'size': 100,
    'host': 'hsp',
    'share_proto': 'CIFS',
}

access_rule = {
    'id': 'acdc7172b-fe07-46c4-b78f-df3e0324ccd0',
    'access_type': 'ip',
    'access_to': '172.24.44.200',
    'access_level': 'rw',
}

hsp_rules = [{
    'name': 'qa_access',
    'host-specification': '172.24.44.200',
    'read-write': 'true',
}]

hsp_cluster = {
    'id': '835e7c00-9d04-11e5-a935-f4521480e990',
    'properties': {
        'total-storage-capacity': 107374182400,
        'total-storage-used': 53687091200,
        'total-storage-available': 53687091200,
        'total-file-system-capacity': 107374182400,
        'total-file-system-space-used': 53687091200,
        'total-file-system-space-available': 53687091200
    },
}

stats_data = {
    'share_backend_name': 'HSP',
    'vendor_name': 'Hitachi',
    'driver_version': '1.0.0',
    'storage_protocol': 'NFS',
    'pools': [{
        'reserved_percentage': 0,
        'pool_name': 'HSP',
        'thin_provisioning': True,
        'total_capacity_gb': 100,
        'free_capacity_gb': 50,
        'max_over_subscription_ratio': 20,
        'qos': False,
        'dedupe': False,
        'compression': False,
    }],
}
manila/tests/share/drivers/hitachi/hsp/test_driver.py (new file, 415 lines)
@@ -0,0 +1,415 @@
# Copyright (c) 2016 Hitachi Data Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
import mock
from oslo_config import cfg

from manila import exception
import manila.share.configuration
import manila.share.driver
from manila.share.drivers.hitachi.hsp import driver
from manila.share.drivers.hitachi.hsp import rest
from manila import test
from manila.tests import fake_share
from manila.tests.share.drivers.hitachi.hsp import fakes

from manila.common import constants
from oslo_utils import units

CONF = cfg.CONF


@ddt.ddt
class HitachiHSPTestCase(test.TestCase):
    def setUp(self):
        super(HitachiHSPTestCase, self).setUp()
        CONF.set_default('driver_handles_share_servers', False)
        CONF.hitachi_hsp_host = '172.24.47.190'
        CONF.hitachi_hsp_username = 'hsp_user'
        CONF.hitachi_hsp_password = 'hsp_password'
        CONF.hitachi_hsp_job_timeout = 300

        self.fake_el = [{
            "path": CONF.hitachi_hsp_host + ":/fakeinstanceid",
            "metadata": {},
            "is_admin_only": False,
        }]
        self.fake_share = fake_share.fake_share(share_proto='nfs')
        self.fake_share_instance = fake_share.fake_share_instance(
            base_share=self.fake_share, export_locations=self.fake_el)

        self.fake_conf = manila.share.configuration.Configuration(None)
        self.fake_private_storage = mock.Mock()
        self.mock_object(rest.HSPRestBackend, "get_cluster",
                         mock.Mock(return_value=fakes.hsp_cluster))
        self._driver = driver.HitachiHSPDriver(
            configuration=self.fake_conf,
            private_storage=self.fake_private_storage)
        self._driver.backend_name = "HSP"
        self.mock_log = self.mock_object(driver, 'LOG')

    def test_update_access_add(self):
        access = {
            'access_type': 'ip',
            'access_to': '172.24.10.10',
            'access_level': 'rw',
        }

        access_list = [access]

        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "get_share",
                         mock.Mock(return_value=fakes.share))
        self.mock_object(rest.HSPRestBackend, "add_access_rule", mock.Mock())

        self._driver.update_access('context', self.fake_share_instance, [],
                                   access_list, [])

        self.assertTrue(self.mock_log.debug.called)

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.get_share.assert_called_once_with(
            fakes.file_system['id'])
        rest.HSPRestBackend.add_access_rule.assert_called_once_with(
            fakes.share['id'], access['access_to'],
            (access['access_level'] == constants.ACCESS_LEVEL_RW))

    def test_update_access_recovery(self):
        access1 = {
            'access_type': 'ip',
            'access_to': '172.24.10.10',
            'access_level': 'rw',
        }
        access2 = {
            'access_type': 'ip',
            'access_to': '188.100.20.10',
            'access_level': 'ro',
        }

        access_list = [access1, access2]

        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "get_share",
                         mock.Mock(return_value=fakes.share))
        self.mock_object(rest.HSPRestBackend, "get_access_rules",
                         mock.Mock(return_value=fakes.hsp_rules))
        self.mock_object(rest.HSPRestBackend, "delete_access_rule")
        self.mock_object(rest.HSPRestBackend, "add_access_rule")

        self._driver.update_access('context', self.fake_share_instance,
                                   access_list, [], [])

        self.assertTrue(self.mock_log.debug.called)

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.get_share.assert_called_once_with(
            fakes.file_system['id'])
        rest.HSPRestBackend.get_access_rules.assert_called_once_with(
            fakes.share['id'])
        rest.HSPRestBackend.delete_access_rule.assert_called_once_with(
            fakes.share['id'], fakes.hsp_rules[0]['host-specification'])
        rest.HSPRestBackend.add_access_rule.assert_has_calls([
            mock.call(fakes.share['id'], access1['access_to'], True),
            mock.call(fakes.share['id'], access2['access_to'], False)
        ], any_order=True)

    def test_update_access_delete(self):
        access1 = {
            'access_type': 'ip',
            'access_to': '172.24.10.10',
            'access_level': 'rw',
        }
        access2 = {
            'access_type': 'something',
            'access_to': '188.100.20.10',
            'access_level': 'ro',
        }

        delete_rules = [access1, access2]

        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "get_share",
                         mock.Mock(return_value=fakes.share))
        self.mock_object(rest.HSPRestBackend, "delete_access_rule",
                         mock.Mock())

        self._driver.update_access('context', self.fake_share_instance, [], [],
                                   delete_rules)

        self.assertTrue(self.mock_log.debug.called)

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.get_share.assert_called_once_with(
            fakes.file_system['id'])
        rest.HSPRestBackend.delete_access_rule.assert_called_once_with(
            fakes.share['id'], delete_rules[0]['access_to'])

    @ddt.data(True, False)
    def test_update_access_ip_exception(self, is_recovery):
        access = {
            'access_type': 'something',
            'access_to': '172.24.10.10',
            'access_level': 'rw',
        }

        access_list = [access]

        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "get_share",
                         mock.Mock(return_value=fakes.share))
        self.mock_object(rest.HSPRestBackend, "get_access_rules",
                         mock.Mock(return_value=fakes.hsp_rules))

        if is_recovery:
            access_args = [access_list, [], []]
        else:
            access_args = [[], access_list, []]

        self.assertRaises(exception.InvalidShareAccess,
                          self._driver.update_access, 'context',
                          self.fake_share_instance, *access_args)

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.get_share.assert_called_once_with(
            fakes.file_system['id'])

        if is_recovery:
            rest.HSPRestBackend.get_access_rules.assert_called_once_with(
                fakes.share['id'])

    def test_update_access_not_found_exception(self):
        access_list = []

        self.mock_object(rest.HSPRestBackend, "get_file_system", mock.Mock(
            side_effect=exception.HSPItemNotFoundException(msg='fake')))

        self.assertRaises(exception.ShareResourceNotFound,
                          self._driver.update_access, 'context',
                          self.fake_share_instance, access_list, [], [])

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])

    def test_create_share(self):
        self.mock_object(rest.HSPRestBackend, "add_file_system", mock.Mock())
        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "add_share", mock.Mock())

        result = self._driver.create_share('context', self.fake_share_instance)

        self.assertEqual(self.fake_el, result)
        self.assertTrue(self.mock_log.debug.called)

        rest.HSPRestBackend.add_file_system.assert_called_once_with(
            self.fake_share_instance['id'],
            self.fake_share_instance['size'] * units.Gi)
        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.add_share.assert_called_once_with(
            self.fake_share_instance['id'], fakes.file_system['id'])

    def test_create_share_export_error(self):
        self.mock_object(rest.HSPRestBackend, "add_file_system", mock.Mock())
        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "add_share", mock.Mock(
            side_effect=exception.HSPBackendException(msg='fake')))
        self.mock_object(rest.HSPRestBackend, "delete_file_system",
                         mock.Mock())

        self.assertRaises(exception.HSPBackendException,
                          self._driver.create_share, 'context',
                          self.fake_share_instance)
        self.assertTrue(self.mock_log.debug.called)
        self.assertTrue(self.mock_log.exception.called)

        rest.HSPRestBackend.add_file_system.assert_called_once_with(
            self.fake_share_instance['id'],
            self.fake_share_instance['size'] * units.Gi)
        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.add_share.assert_called_once_with(
            self.fake_share_instance['id'], fakes.file_system['id'])
        rest.HSPRestBackend.delete_file_system.assert_called_once_with(
            fakes.file_system['id'])

    def test_create_share_invalid_share_protocol(self):
        self.assertRaises(exception.InvalidShare,
                          self._driver.create_share, 'context',
                          fakes.invalid_share)

    def test_delete_share(self):
        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "get_share",
                         mock.Mock(return_value=fakes.share))
        self.mock_object(rest.HSPRestBackend, "delete_share", mock.Mock())
        self.mock_object(rest.HSPRestBackend, "delete_file_system",
                         mock.Mock())

        self._driver.delete_share('context', self.fake_share_instance)

        self.assertTrue(self.mock_log.debug.called)

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.get_share.assert_called_once_with(
            fakes.file_system['id'])
        rest.HSPRestBackend.delete_share.assert_called_once_with(
            fakes.share['id'])
        rest.HSPRestBackend.delete_file_system.assert_called_once_with(
            fakes.file_system['id'])

    def test_delete_share_already_deleted(self):
        self.mock_object(rest.HSPRestBackend, "get_file_system", mock.Mock(
            side_effect=exception.HSPItemNotFoundException(msg='fake')))

        self.mock_object(driver.LOG, "info")

        self._driver.delete_share('context', self.fake_share_instance)

        self.assertTrue(self.mock_log.info.called)

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])

    def test_extend_share(self):
        new_size = 2

        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "resize_file_system",
                         mock.Mock())

        self._driver.extend_share(self.fake_share_instance, new_size)

        self.assertTrue(self.mock_log.info.called)

        rest.HSPRestBackend.get_cluster.assert_called_once_with()
        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.resize_file_system.assert_called_once_with(
            fakes.file_system['id'], new_size * units.Gi)

    def test_extend_share_with_no_available_space_in_fs(self):
        new_size = 150

        self.assertRaises(exception.HSPBackendException,
                          self._driver.extend_share, self.fake_share_instance,
                          new_size)

        rest.HSPRestBackend.get_cluster.assert_called_once_with()

    def test_shrink_share(self):
        new_size = 70

        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))
        self.mock_object(rest.HSPRestBackend, "resize_file_system",
                         mock.Mock())

        self._driver.shrink_share(self.fake_share_instance, new_size)

        self.assertTrue(self.mock_log.info.called)

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])
        rest.HSPRestBackend.resize_file_system.assert_called_once_with(
            fakes.file_system['id'], new_size * units.Gi)

    def test_shrink_share_new_size_lower_than_usage(self):
        new_size = 20

        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))

        self.assertRaises(exception.ShareShrinkingPossibleDataLoss,
                          self._driver.shrink_share, self.fake_share_instance,
                          new_size)

        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])

    def test_manage_existing(self):
        self.mock_object(self.fake_private_storage, "update")
        self.mock_object(rest.HSPRestBackend, "get_share",
                         mock.Mock(return_value=fakes.share))
        self.mock_object(rest.HSPRestBackend, "rename_file_system",
                         mock.Mock())
        self.mock_object(rest.HSPRestBackend, "get_file_system",
                         mock.Mock(return_value=fakes.file_system))

        result = self._driver.manage_existing(self.fake_share_instance,
                                              'option')

        expected = {
            'size': fakes.file_system['properties']['quota'] / units.Gi,
            'export_locations': self.fake_el,
        }

        self.assertTrue(self.mock_log.info.called)
        self.assertEqual(expected, result)

        rest.HSPRestBackend.get_share.assert_called_once_with(
            name=self.fake_share_instance['id'])
        rest.HSPRestBackend.rename_file_system.assert_called_once_with(
            fakes.file_system['id'], self.fake_share_instance['id'])
        rest.HSPRestBackend.get_file_system.assert_called_once_with(
            self.fake_share_instance['id'])

    def test_manage_existing_wrong_share_id(self):
        self.mock_object(rest.HSPRestBackend, "get_share", mock.Mock(
            side_effect=exception.HSPItemNotFoundException(msg='fake')))

        self.assertRaises(exception.ManageInvalidShare,
                          self._driver.manage_existing,
                          self.fake_share_instance,
                          'option')

        rest.HSPRestBackend.get_share.assert_called_once_with(
            name=self.fake_share_instance['id'])

    def test_unmanage(self):
        self.mock_object(self.fake_private_storage, "get",
                         mock.Mock(
                             return_value='original_name'))
        self.mock_object(self.fake_private_storage, "delete")

        self._driver.unmanage(self.fake_share_instance)

        self.assertTrue(self.mock_log.info.called)

    def test__update_share_stats(self):
        mock__update_share_stats = self.mock_object(
            manila.share.driver.ShareDriver, '_update_share_stats')
        self.mock_object(self.fake_private_storage, 'get', mock.Mock(
            return_value={'provisioned': 0}
        ))

        self._driver._update_share_stats()

        rest.HSPRestBackend.get_cluster.assert_called_once_with()
        mock__update_share_stats.assert_called_once_with(fakes.stats_data)
        self.assertTrue(self.mock_log.info.called)
manila/tests/share/drivers/hitachi/hsp/test_rest.py (new file, 344 lines)
@@ -0,0 +1,344 @@
# Copyright (c) 2016 Hitachi Data Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
import json
import mock
import requests
import time

from manila import exception
from manila.share.drivers.hitachi.hsp import rest
from manila import test
from manila.tests.share.drivers.hitachi.hsp import fakes


class FakeRequests(object):
    status_code = 0
    headers = {}
    content = ""

    def __init__(self, status_code, content='null'):
        self.status_code = status_code
        self.headers = {'location': 'fake_location'}
        self.content = content

    def json(self):
        return {'messages': [{'message': 'fake_msg'}]}


@ddt.ddt
class HitachiHSPRestTestCase(test.TestCase):
    def setUp(self):
        super(HitachiHSPRestTestCase, self).setUp()
        self.hitachi_hsp_host = '172.24.47.190'
        self.hitachi_hsp_username = 'hds_hnas_user'
        self.hitachi_hsp_password = 'hds_hnas_password'

        self._driver = rest.HSPRestBackend(self.hitachi_hsp_host,
                                           self.hitachi_hsp_username,
                                           self.hitachi_hsp_password)

    @ddt.data(202, 500)
    def test__send_post(self, code):
        self.mock_object(requests, "post", mock.Mock(
            return_value=FakeRequests(code)))

        if code == 202:
            self.mock_object(rest.HSPRestBackend, "_wait_job_status",
                             mock.Mock())
            self._driver._send_post('fake_url')

            rest.HSPRestBackend._wait_job_status.assert_called_once_with(
                'fake_location', 'COMPLETE')
        else:
            self.assertRaises(exception.HSPBackendException,
                              self._driver._send_post, 'fake_url')

    @ddt.data({'code': 200, 'content': 'null'},
              {'code': 200, 'content': 'fake_content'},
              {'code': 500, 'content': 'null'})
    @ddt.unpack
    def test__send_get(self, code, content):
        self.mock_object(requests, "get", mock.Mock(
            return_value=FakeRequests(code, content)))

        if code == 200:
            result = self._driver._send_get('fake_url')
            if content == 'null':
                self.assertIsNone(result)
            else:
                self.assertEqual(FakeRequests(code, content).json(), result)
        else:
            self.assertRaises(exception.HSPBackendException,
                              self._driver._send_get, 'fake_url')

    @ddt.data(202, 500)
    def test__send_delete(self, code):
        self.mock_object(requests, "delete", mock.Mock(
            return_value=FakeRequests(code)))

        if code == 202:
            self.mock_object(rest.HSPRestBackend, "_wait_job_status",
                             mock.Mock())
            self._driver._send_delete('fake_url')

            rest.HSPRestBackend._wait_job_status.assert_called_once_with(
                'fake_location', 'COMPLETE')
        else:
            self.assertRaises(exception.HSPBackendException,
                              self._driver._send_delete, 'fake_url')

    def test_add_file_system(self):
        url = "https://172.24.47.190/hspapi/file-systems/"

        payload = {
            'quota': fakes.file_system['properties']['quota'],
            'auto-access': False,
            'enabled': True,
            'description': '',
            'record-access-time': True,
            'tags': '',
            'space-hwm': 90,
            'space-lwm': 70,
            'name': fakes.file_system['properties']['name'],
        }

        self.mock_object(rest.HSPRestBackend, "_send_post", mock.Mock())
        self._driver.add_file_system(fakes.file_system['properties']['name'],
                                     fakes.file_system['properties']['quota'])

        rest.HSPRestBackend._send_post.assert_called_once_with(
            url, payload=json.dumps(payload))

    def test_get_file_system(self):
        url = ("https://172.24.47.190/hspapi/file-systems/list?name=%s" %
               fakes.file_system['properties']['name'])

        self.mock_object(rest.HSPRestBackend, "_send_get", mock.Mock(
            return_value={'list': [fakes.file_system]}))

        result = self._driver.get_file_system(
            fakes.file_system['properties']['name'])

        self.assertEqual(fakes.file_system, result)

        rest.HSPRestBackend._send_get.assert_called_once_with(url)

    def test_get_file_system_exception(self):
        url = ("https://172.24.47.190/hspapi/file-systems/list?name=%s" %
               fakes.file_system['properties']['name'])

        self.mock_object(rest.HSPRestBackend, "_send_get",
                         mock.Mock(return_value=None))

        self.assertRaises(exception.HSPItemNotFoundException,
                          self._driver.get_file_system,
                          fakes.file_system['properties']['name'])

        rest.HSPRestBackend._send_get.assert_called_once_with(url)

    def test_delete_file_system(self):
        url = ("https://172.24.47.190/hspapi/file-systems/%s" %
               fakes.file_system['id'])

        self.mock_object(rest.HSPRestBackend, "_send_delete", mock.Mock())
        self._driver.delete_file_system(fakes.file_system['id'])

        rest.HSPRestBackend._send_delete.assert_called_once_with(url)

    def test_resize_file_system(self):
        url = ("https://172.24.47.190/hspapi/file-systems/%s" %
               fakes.file_system['id'])
        new_size = 53687091200
        payload = {'quota': new_size}

        self.mock_object(rest.HSPRestBackend, "_send_post", mock.Mock())
        self._driver.resize_file_system(fakes.file_system['id'], new_size)

        rest.HSPRestBackend._send_post.assert_called_once_with(
            url, payload=json.dumps(payload))

    def test_rename_file_system(self):
        url = ("https://172.24.47.190/hspapi/file-systems/%s" %
               fakes.file_system['id'])
        new_name = "fs_rename"
        payload = {'name': new_name}

        self.mock_object(rest.HSPRestBackend, "_send_post", mock.Mock())

        self._driver.rename_file_system(fakes.file_system['id'], new_name)

        rest.HSPRestBackend._send_post.assert_called_once_with(
            url, payload=json.dumps(payload))

    def test_add_share(self):
        url = "https://172.24.47.190/hspapi/shares/"
        payload = {
            'description': '',
            'type': 'NFS',
            'enabled': True,
            'tags': '',
            'name': fakes.share['name'],
            'file-system-id': fakes.share['properties']['file-system-id'],
        }

        self.mock_object(rest.HSPRestBackend, "_send_post", mock.Mock())

        self._driver.add_share(fakes.share['name'],
                               fakes.share['properties']['file-system-id'])

        rest.HSPRestBackend._send_post.assert_called_once_with(
            url, payload=json.dumps(payload))

    @ddt.data({'fs_id': None,
               'name': fakes.share['name'],
               'url': 'https://172.24.47.190/hspapi/shares/list?'
                      'name=aa4a7710-f326-41fb-ad18-b4ad587fc87a'},
              {'fs_id': fakes.share['properties']['file-system-id'],
               'name': None,
               'url': 'https://172.24.47.190/hspapi/shares/list?'
                      'file-system-id=33689245-1806-45d0-8507-0700b5f89750'})
    @ddt.unpack
    def test_get_share(self, fs_id, name, url):
        self.mock_object(rest.HSPRestBackend, "_send_get",
                         mock.Mock(return_value={'list': [fakes.share]}))

        result = self._driver.get_share(fs_id, name)

        self.assertEqual(fakes.share, result)

        rest.HSPRestBackend._send_get.assert_called_once_with(url)

    def test_get_share_exception(self):
        url = ("https://172.24.47.190/hspapi/shares/list?"
               "name=aa4a7710-f326-41fb-ad18-b4ad587fc87a")

        self.mock_object(rest.HSPRestBackend, "_send_get", mock.Mock(
            return_value=None))

        self.assertRaises(exception.HSPItemNotFoundException,
                          self._driver.get_share, None, fakes.share['name'])

        rest.HSPRestBackend._send_get.assert_called_once_with(url)

    def test_delete_share(self):
        url = "https://172.24.47.190/hspapi/shares/%s" % fakes.share['id']

        self.mock_object(rest.HSPRestBackend, "_send_delete", mock.Mock())

        self._driver.delete_share(fakes.share['id'])

        rest.HSPRestBackend._send_delete.assert_called_once_with(url)

    def test_add_access_rule(self):
        url = "https://172.24.47.190/hspapi/shares/%s/" % fakes.share['id']
        payload = {
            "action": "add-access-rule",
            "name": fakes.share['id'] + fakes.access_rule['access_to'],
            "host-specification": fakes.access_rule['access_to'],
            "read-write": fakes.access_rule['access_level'],
        }

        self.mock_object(rest.HSPRestBackend, "_send_post", mock.Mock())

        self._driver.add_access_rule(fakes.share['id'],
                                     fakes.access_rule['access_to'],
                                     fakes.access_rule['access_level'])

        rest.HSPRestBackend._send_post.assert_called_once_with(
            url, payload=json.dumps(payload))

    def test_delete_access_rule(self):
        url = "https://172.24.47.190/hspapi/shares/%s/" % fakes.share['id']
        payload = {
            "action": "delete-access-rule",
            "name": fakes.share['id'] + fakes.access_rule['access_to'],
        }

        self.mock_object(rest.HSPRestBackend, "_send_post", mock.Mock())

        self._driver.delete_access_rule(fakes.share['id'],
                                        fakes.access_rule['access_to'])

        rest.HSPRestBackend._send_post.assert_called_once_with(
            url, payload=json.dumps(payload))

    @ddt.data({'value': {'list': fakes.hsp_rules}, 'res': fakes.hsp_rules},
              {'value': None, 'res': []})
    @ddt.unpack
    def test_get_access_rules(self, value, res):
        url = ("https://172.24.47.190/hspapi/shares/%s/access-rules" %
               fakes.share['id'])

        self.mock_object(rest.HSPRestBackend, "_send_get", mock.Mock(
            return_value=value))

        result = self._driver.get_access_rules(fakes.share['id'])

        self.assertEqual(res, result)

        rest.HSPRestBackend._send_get.assert_called_once_with(url)

    @ddt.data({'list': [fakes.hsp_cluster]}, None)
    def test_get_clusters(self, value):
        url = "https://172.24.47.190/hspapi/clusters/list"

        self.mock_object(rest.HSPRestBackend, "_send_get", mock.Mock(
            return_value=value))

        if value:
            result = self._driver.get_cluster()

            self.assertEqual(fakes.hsp_cluster, result)
        else:
            self.assertRaises(exception.HSPBackendException,
                              self._driver.get_cluster)

        rest.HSPRestBackend._send_get.assert_called_once_with(url)

    @ddt.data('COMPLETE', 'ERROR', 'RUNNING')
    def test__wait_job_status(self, stat):
        url = "fake_job_url"
        json = {
            'id': 'fake_id',
            'properties': {'completion-status': stat},
            'messages': [{
                'id': 'fake_id',
                'message': 'fake_msg'
            }]
        }

        self.mock_object(rest.HSPRestBackend, "_send_get", mock.Mock(
            return_value=json))
        self.mock_object(time, "sleep")

        if stat == 'COMPLETE':
            self._driver._wait_job_status(url, 'COMPLETE')

            rest.HSPRestBackend._send_get.assert_called_once_with(url)
        elif stat == 'ERROR':
            self.assertRaises(exception.HSPBackendException,
                              self._driver._wait_job_status, url, 'COMPLETE')

            rest.HSPRestBackend._send_get.assert_called_once_with(url)
        else:
            self.assertRaises(exception.HSPTimeoutException,
                              self._driver._wait_job_status, url, 'COMPLETE')

            rest.HSPRestBackend._send_get.assert_has_calls([
                mock.call(url), mock.call(url), mock.call(url), mock.call(url),
                mock.call(url),
            ])
releasenotes/notes/hsp-driver-e00aff5bc89d4b54.yaml (new file, 7 lines)
@ -0,0 +1,7 @@
|
||||
---
|
||||
prelude: >
|
||||
Add Hitachi HSP driver.
|
||||
features:
|
||||
- Added new Hitachi HSP driver, that supports manage/unmanage and
|
||||
shrinking of shares, along with all the minimum driver features. Does
|
||||
not support snapshots.
|
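As a usage sketch of the manage/unmanage support mentioned above, an existing HSP NFS export could be brought under Manila and later released with the standard CLI commands. The controller host name, the backend section name hsp1, and the export path are hypothetical; the pool name HSP and the export format <hsp_host>:/<share_name> come from the driver in this patch.

    manila manage controller@hsp1#HSP NFS 172.24.47.190:/existing-share --name managed_hsp_share
    manila unmanage managed_hsp_share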