Merge "HPE 3PAR driver pool support"
This commit is contained in:
commit
07563cff3d
@@ -769,6 +769,8 @@ class ShareDriver(object):

         Redefine it within share driver when it is going to handle share
         servers.
+
+        :param metadata: a dictionary, for now containing a key 'request_host'
         """
         raise NotImplementedError()

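The new `metadata` argument is optional and currently carries only the share instance host. A minimal sketch of how a driver that handles share servers might consume it — the class name and pool parsing below are illustrative placeholders, not part of this change:

```python
class ExampleShareDriver(ShareDriver):
    def _setup_server(self, network_info, metadata=None):
        # 'request_host' is the share instance host, e.g. 'host@backend#pool',
        # which lets the driver place the share server on the right pool.
        if metadata is not None and metadata.get('request_host'):
            pool = metadata['request_host'].rsplit('#', 1)[-1]
            return {'server_id': network_info['server_id'], 'pool': pool}
        raise NotImplementedError()
```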
@@ -21,6 +21,7 @@ import os
 import re

 from oslo_config import cfg
+from oslo_config import types
 from oslo_log import log
 import six

@@ -31,8 +32,90 @@ from manila.i18n import _LI
 from manila.share import driver
 from manila.share.drivers.hpe import hpe_3par_mediator
 from manila.share import share_types
+from manila.share import utils as share_utils
 from manila import utils

+LOG = log.getLogger(__name__)
+
+
+class FPG(types.String, types.IPAddress):
+    """FPG type.
+
+    Used to represent multiple pools per backend values.
+    Converts configuration value to an FPGs value.
+    FPGs value format:
+    FPG name, IP address 1, IP address 2, ..., IP address 4
+    where FPG name is a string value,
+    IP address is of type types.IPAddress
+
+    Optionally doing range checking.
+    If value is whitespace or empty string will raise error
+
+    :param min_ip: Optional check that number of min IP address of VFS.
+    :param max_ip: Optional check that number of max IP address of VFS.
+    :param type_name: Type name to be used in the sample config file.
+
+    """
+
+    MAX_SUPPORTED_IP_PER_VFS = 4
+
+    def __init__(self, min_ip=0, max_ip=MAX_SUPPORTED_IP_PER_VFS,
+                 type_name='FPG'):
+        types.String.__init__(self, type_name=type_name)
+        types.IPAddress.__init__(self, type_name=type_name)
+
+        if max_ip < min_ip:
+            msg = _("Pool's max acceptable IP cannot be less than min.")
+            raise exception.HPE3ParInvalid(err=msg)
+
+        if min_ip < 0:
+            msg = _("Pools must be configured with zero or more IPs.")
+            raise exception.HPE3ParInvalid(err=msg)
+
+        if max_ip > FPG.MAX_SUPPORTED_IP_PER_VFS:
+            msg = (_("Pool's max acceptable IP cannot be greater than "
+                     "supported value=%s.") % FPG.MAX_SUPPORTED_IP_PER_VFS)
+            raise exception.HPE3ParInvalid(err=msg)
+
+        self.min_ip = min_ip
+        self.max_ip = max_ip
+
+    def __call__(self, value):
+        if value is None or value.strip(' ') is '':
+            message = _("Invalid configuration. hpe3par_fpg must be set.")
+            LOG.error(message)
+            raise exception.HPE3ParInvalid(err=message)
+
+        ips = []
+        values = value.split(",")
+        # Extract pool name
+        pool_name = values.pop(0).strip()
+
+        # values will now be ['ip1', ...]
+        if len(values) < self.min_ip:
+            msg = (_("Require at least %s IPs configured per "
+                     "pool") % self.min_ip)
+            raise exception.HPE3ParInvalid(err=msg)
+        if len(values) > self.max_ip:
+            msg = (_("Cannot configure IPs more than max supported "
+                     "%s IPs per pool") % self.max_ip)
+            raise exception.HPE3ParInvalid(err=msg)
+
+        for ip_addr in values:
+            ip_addr = types.String.__call__(self, ip_addr.strip())
+            try:
+                ips.append(types.IPAddress.__call__(self, ip_addr))
+            except ValueError as verror:
+                raise exception.HPE3ParInvalid(err=verror)
+        fpg = {pool_name: ips}
+        return fpg
+
+    def __repr__(self):
+        return 'FPG'
+
+    def _formatter(self, value):
+        return six.text_type(value)
+
+
 HPE3PAR_OPTS = [
     cfg.StrOpt('hpe3par_api_url',
                default='',
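A quick sketch of how the FPG type added above converts one configured value. Pool names and addresses are illustrative; the failure case mirrors the unit tests added later in this change:

```python
fpg_type = FPG(min_ip=0, max_ip=FPG.MAX_SUPPORTED_IP_PER_VFS)

fpg_type('samplefpg, 10.0.0.1, 10.0.0.2')
# -> {'samplefpg': ['10.0.0.1', '10.0.0.2']}

fpg_type('samplefpg')
# -> {'samplefpg': []}

fpg_type('invalid_ip_fpg, 10.256.0.1')
# -> raises exception.HPE3ParInvalid (10.256.0.1 is not a valid IP address)
```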
@@ -65,14 +148,10 @@ HPE3PAR_OPTS = [
                default=22,
                help='SSH port to use with SAN',
                deprecated_name='hp3par_san_ssh_port'),
-    cfg.StrOpt('hpe3par_fpg',
-               default="OpenStack",
+    cfg.MultiOpt('hpe3par_fpg',
+                 item_type=FPG(min_ip=0, max_ip=FPG.MAX_SUPPORTED_IP_PER_VFS),
               help="The File Provisioning Group (FPG) to use",
               deprecated_name='hp3par_fpg'),
-    cfg.StrOpt('hpe3par_share_ip_address',
-               default='',
-               help="The IP address for shares not using a share server",
-               deprecated_name='hp3par_share_ip_address'),
     cfg.BoolOpt('hpe3par_fstore_per_share',
                 default=False,
                 help="Use one filestore per share",
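Because hpe3par_fpg is now a MultiOpt of FPG items, it can be repeated in manila.conf, one pool per line. The backend section name below is only an example; the resulting value shape is what the driver code later iterates over:

```python
# manila.conf (illustrative backend section):
#
#   [hpe3parbackend]
#   hpe3par_fpg = pool_a, 10.0.0.10
#   hpe3par_fpg = pool_b, 10.0.0.20, 10.0.0.21
#
# configuration.safe_get('hpe3par_fpg') then returns a list of one-entry dicts:
#   [{'pool_a': ['10.0.0.10']}, {'pool_b': ['10.0.0.20', '10.0.0.21']}]
```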
@@ -107,7 +186,13 @@ HPE3PAR_OPTS = [
 CONF = cfg.CONF
 CONF.register_opts(HPE3PAR_OPTS)

-LOG = log.getLogger(__name__)
+
+def to_list(var):
+    """Convert var to list type if not"""
+    if isinstance(var, six.string_types):
+        return [var]
+    else:
+        return var


 class HPE3ParShareDriver(driver.ShareDriver):
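The small helper added above normalizes a bare string to a list, so single discovered VFS addresses and configured address lists can be handled uniformly:

```python
to_list('10.0.0.1')                # -> ['10.0.0.1']
to_list(['10.0.0.1', '10.0.0.2'])  # -> ['10.0.0.1', '10.0.0.2'] (unchanged)
```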
@@ -126,10 +211,11 @@ class HPE3ParShareDriver(driver.ShareDriver):
         2.0.4 - Reduce the fsquota by share size
                 when a share is deleted #1582931
         2.0.5 - Add update_access support
+        2.0.6 - Multi pool support per backend

     """

-    VERSION = "2.0.5"
+    VERSION = "2.0.6"

     def __init__(self, *args, **kwargs):
         super(HPE3ParShareDriver, self).__init__((True, False),
@@ -140,9 +226,7 @@ class HPE3ParShareDriver(driver.ShareDriver):
         self.configuration.append_config_values(HPE3PAR_OPTS)
         self.configuration.append_config_values(driver.ssh_opts)
         self.configuration.append_config_values(config.global_opts)
-        self.fpg = None
-        self.vfs = None
-        self.share_ip_address = None
+        self.fpgs = {}
         self._hpe3par = None  # mediator between driver and client

     def do_setup(self, context):
@@ -152,14 +236,6 @@ class HPE3ParShareDriver(driver.ShareDriver):
                  {'driver_name': self.__class__.__name__,
                   'version': self.VERSION})

-        if not self.driver_handles_share_servers:
-            self.share_ip_address = self.configuration.hpe3par_share_ip_address
-            if not self.share_ip_address:
-                raise exception.HPE3ParInvalid(
-                    _("Unsupported configuration. "
-                      "hpe3par_share_ip_address must be set when "
-                      "driver_handles_share_servers is False."))
-
         mediator = hpe_3par_mediator.HPE3ParMediator(
             hpe3par_username=self.configuration.hpe3par_username,
             hpe3par_password=self.configuration.hpe3par_password,
@@ -172,8 +248,6 @@ class HPE3ParShareDriver(driver.ShareDriver):
             hpe3par_fstore_per_share=(self.configuration
                                       .hpe3par_fstore_per_share),
             hpe3par_require_cifs_ip=self.configuration.hpe3par_require_cifs_ip,
-            hpe3par_share_ip_address=(
-                self.configuration.hpe3par_share_ip_address),
             hpe3par_cifs_admin_access_username=(
                 self.configuration.hpe3par_cifs_admin_access_username),
             hpe3par_cifs_admin_access_password=(
@@ -188,15 +262,94 @@ class HPE3ParShareDriver(driver.ShareDriver):

         mediator.do_setup()

-        # FPG must be configured and must exist.
-        self.fpg = self.configuration.safe_get('hpe3par_fpg')
-        # Validate the FPG and discover the VFS
-        # This also validates the client, connection, firmware, WSAPI, FPG...
-        self.vfs = mediator.get_vfs_name(self.fpg)
+        def _validate_pool_ips(addresses, conf_pool_ips):
+            # Pool configured IP addresses should be subset of IP addresses
+            # retured from vfs
+            addresses = to_list(addresses)
+            if not set(conf_pool_ips) <= set(addresses):
+                msg = _("Incorrect configuration. "
+                        "Configuration pool IP address did not match with "
+                        "IP addresses at 3par array")
+                raise exception.HPE3ParInvalid(err=msg)
+
+        def _construct_fpg():
+            # FPG must be configured and must exist.
+            # self.configuration.safe_get('hpe3par_fpg') will have value in
+            # following format:
+            # [ {'pool_name':['ip_addr', 'ip_addr', ...]}, ... ]
+            for fpg in self.configuration.safe_get('hpe3par_fpg'):
+                pool_name = list(fpg)[0]
+                conf_pool_ips = fpg[pool_name]
+
+                # Validate the FPG and discover the VFS
+                # This also validates the client, connection, firmware, WSAPI,
+                # FPG...
+                vfs_info = mediator.get_vfs(pool_name)
+                if self.driver_handles_share_servers:
+                    # Use discovered IP(s) from array
+                    vfs_info['vfsip']['address'] = to_list(
+                        vfs_info['vfsip']['address'])
+                    self.fpgs[pool_name] = {
+                        vfs_info['vfsname']: vfs_info['vfsip']['address']}
+                elif conf_pool_ips == []:
+                    # not DHSS and IPs not configured in manila.conf.
+                    if not vfs_info['vfsip']['address']:
+                        msg = _("Unsupported configuration. "
+                                "hpe3par_fpg must have IP address "
+                                "or be discoverable at 3PAR")
+                        LOG.error(msg)
+                        raise exception.HPE3ParInvalid(err=msg)
+                    else:
+                        # Use discovered pool ips
+                        vfs_info['vfsip']['address'] = to_list(
+                            vfs_info['vfsip']['address'])
+                        self.fpgs[pool_name] = {
+                            vfs_info['vfsname']: vfs_info['vfsip']['address']}
+                else:
+                    # not DHSS and IPs configured in manila.conf
+                    _validate_pool_ips(vfs_info['vfsip']['address'],
+                                       conf_pool_ips)
+                    self.fpgs[pool_name] = {
+                        vfs_info['vfsname']: conf_pool_ips}
+
+        _construct_fpg()

         # Don't set _hpe3par until it is ready. Otherwise _update_stats fails.
         self._hpe3par = mediator

+    def _get_pool_location_from_share_host(self, share_instance_host):
+        # Return pool name, vfs, IPs for a pool from share instance host
+        pool_name = share_utils.extract_host(share_instance_host, level='pool')
+        if not pool_name:
+            message = (_("Pool is not available in the share host %s.") %
+                       share_instance_host)
+            raise exception.InvalidHost(reason=message)
+
+        if pool_name not in self.fpgs:
+            message = (_("Pool location lookup failed. "
+                         "Could not find pool %s") %
+                       pool_name)
+            raise exception.InvalidHost(reason=message)
+
+        vfs = list(self.fpgs[pool_name])[0]
+        ips = self.fpgs[pool_name][vfs]
+
+        return (pool_name, vfs, ips)
+
+    def _get_pool_location(self, share, share_server=None):
+        # Return pool name, vfs, IPs for a pool from share host field
+        # Use share_server if provided, instead of self.fpgs
+        if share_server is not None:
+            # When DHSS
+            ips = share_server['backend_details'].get('ip')
+            ips = to_list(ips)
+            vfs = share_server['backend_details'].get('vfs')
+            pool_name = share_server['backend_details'].get('fpg')
+            return (pool_name, vfs, ips)
+        else:
+            # When DHSS = false
+            return self._get_pool_location_from_share_host(share['host'])
+
     def check_for_setup_error(self):

         try:
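For reference, the self.fpgs map built by _construct_fpg() has the shape {pool: {vfs: [ips]}} — the same shape as EXPECTED_FPG_MAP in the tests later in this change — and _get_pool_location_from_share_host() resolves a share host string against it. Values here are illustrative:

```python
driver.fpgs = {'pool_a': {'vfs_a': ['10.0.0.10']}}

pool, vfs, ips = driver._get_pool_location_from_share_host(
    'hostname@backend#pool_a')
# -> ('pool_a', 'vfs_a', ['10.0.0.10'])
```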
@@ -230,6 +383,31 @@ class HPE3ParShareDriver(driver.ShareDriver):
     def get_network_allocations_number(self):
         return 1

+    def choose_share_server_compatible_with_share(self, context, share_servers,
+                                                  share, snapshot=None,
+                                                  consistency_group=None):
+        """Method that allows driver to choose share server for provided share.
+
+        If compatible share-server is not found, method should return None.
+
+        :param context: Current context
+        :param share_servers: list with share-server models
+        :param share: share model
+        :param snapshot: snapshot model
+        :param consistency_group: ConsistencyGroup model with shares
+        :returns: share-server or None
+        """
+        # If creating in a consistency group, raise exception
+        if consistency_group:
+            msg = _("HPE 3PAR driver does not support consistency group")
+            raise exception.InvalidRequest(message=msg)
+
+        pool_name = share_utils.extract_host(share['host'], level='pool')
+        for share_server in share_servers:
+            if share_server['backend_details'].get('fpg') == pool_name:
+                return share_server
+        return None
+
     @staticmethod
     def _validate_network_type(network_type):
         if network_type not in ('flat', 'vlan', None):
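The compatibility check relies on share_utils.extract_host() pulling the pool segment out of the share host and matching it against each share server's stored 'fpg'. A small illustration (host string is an example):

```python
from manila.share import utils as share_utils

share_utils.extract_host('hostname@backend#pool_a', level='pool')
# -> 'pool_a', which is compared with share_server['backend_details']['fpg']
```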
@@ -238,37 +416,53 @@ class HPE3ParShareDriver(driver.ShareDriver):
             raise exception.NetworkBadConfigurationException(
                 reason=reason % network_type)

+    def _create_share_server(self, network_info, request_host=None):
+        """Is called to create/setup share server"""
+        # Return pool name, vfs, IPs for a pool
+        pool_name, vfs, ips = self._get_pool_location_from_share_host(
+            request_host)
+
+        ip = network_info['network_allocations'][0]['ip_address']
+        if ip not in ips:
+            # Besides DHSS, admin could have setup IP to VFS directly on array
+            if len(ips) > (FPG.MAX_SUPPORTED_IP_PER_VFS - 1):
+                message = (_("Pool %s has exceeded 3PAR's "
+                             "max supported VFS IP address") % pool_name)
+                LOG.error(message)
+                raise exception.Invalid(message)
+
+            subnet = utils.cidr_to_netmask(network_info['cidr'])
+            vlantag = network_info['segmentation_id']
+
+            self._hpe3par.create_fsip(ip, subnet, vlantag, pool_name, vfs)
+            # Update in global saved config, self.fpgs[pool_name]
+            ips.append(ip)
+
+        return {'share_server_name': network_info['server_id'],
+                'share_server_id': network_info['server_id'],
+                'ip': ip,
+                'subnet': subnet,
+                'vlantag': vlantag if vlantag else 0,
+                'fpg': pool_name,
+                'vfs': vfs}
+
     def _setup_server(self, network_info, metadata=None):

         LOG.debug("begin _setup_server with %s", network_info)

         self._validate_network_type(network_info['network_type'])
-        ip = network_info['network_allocations'][0]['ip_address']
-        subnet = utils.cidr_to_netmask(network_info['cidr'])
-        vlantag = network_info['segmentation_id']
-
-        self._hpe3par.create_fsip(ip, subnet, vlantag, self.fpg, self.vfs)
-
-        return {
-            'share_server_name': network_info['server_id'],
-            'share_server_id': network_info['server_id'],
-            'ip': ip,
-            'subnet': subnet,
-            'vlantag': vlantag if vlantag else 0,
-            'fpg': self.fpg,
-            'vfs': self.vfs,
-        }
+        if metadata is not None and metadata['request_host'] is not None:
+            return self._create_share_server(network_info,
+                                             metadata['request_host'])

     def _teardown_server(self, server_details, security_services=None):
         LOG.debug("begin _teardown_server with %s", server_details)
-        self._hpe3par.remove_fsip(server_details.get('ip'),
-                                  server_details.get('fpg'),
-                                  server_details.get('vfs'))
-
-    def _get_share_ip(self, share_server):
-        return share_server['backend_details'].get('ip') if share_server else (
-            self.share_ip_address)
+        fpg = server_details.get('fpg')
+        vfs = server_details.get('vfs')
+        ip = server_details.get('ip')
+        self._hpe3par.remove_fsip(ip, fpg, vfs)
+        if ip in self.fpgs[fpg][vfs]:
+            self.fpgs[fpg][vfs].remove(ip)

     @staticmethod
     def build_share_comment(share):
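The dict returned by _create_share_server() is persisted as the share server's backend_details and later read back by _get_pool_location() and _teardown_server(); values below are illustrative:

```python
backend_details = {
    'share_server_name': 'server-id',
    'share_server_id': 'server-id',
    'ip': '10.0.0.30',
    'subnet': '255.255.255.0',
    'vlantag': 101,
    'fpg': 'pool_a',
    'vfs': 'vfs_a',
}
```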
@@ -289,7 +483,7 @@ class HPE3ParShareDriver(driver.ShareDriver):
     def create_share(self, context, share, share_server=None):
         """Is called to create share."""

-        ip = self._get_share_ip(share_server)
+        fpg, vfs, ips = self._get_pool_location(share, share_server)

         protocol = share['share_proto']
         extra_specs = share_types.get_extra_specs_from_share(share)
@@ -299,18 +493,18 @@ class HPE3ParShareDriver(driver.ShareDriver):
             share['id'],
             protocol,
             extra_specs,
-            self.fpg, self.vfs,
+            fpg, vfs,
             size=share['size'],
             comment=self.build_share_comment(share)
         )

-        return self._hpe3par.build_export_location(protocol, ip, path)
+        return self._hpe3par.build_export_locations(protocol, ips, path)

     def create_share_from_snapshot(self, context, share, snapshot,
                                    share_server=None):
         """Is called to create share from snapshot."""

-        ip = self._get_share_ip(share_server)
+        fpg, vfs, ips = self._get_pool_location(share, share_server)

         protocol = share['share_proto']
         extra_specs = share_types.get_extra_specs_from_share(share)
@@ -322,43 +516,50 @@ class HPE3ParShareDriver(driver.ShareDriver):
             share['project_id'],
             snapshot['share_id'],
             snapshot['id'],
-            self.fpg,
-            self.vfs,
+            fpg,
+            vfs,
+            ips,
             size=share['size'],
             comment=self.build_share_comment(share)
         )

-        return self._hpe3par.build_export_location(protocol, ip, path)
+        return self._hpe3par.build_export_locations(protocol, ips, path)

     def delete_share(self, context, share, share_server=None):
         """Deletes share and its fstore."""

+        fpg, vfs, ips = self._get_pool_location(share, share_server)
         self._hpe3par.delete_share(share['project_id'],
                                    share['id'],
                                    share['size'],
                                    share['share_proto'],
-                                   self.fpg,
-                                   self.vfs)
+                                   fpg,
+                                   vfs,
+                                   ips[0])

     def create_snapshot(self, context, snapshot, share_server=None):
         """Creates a snapshot of a share."""

+        fpg, vfs, ips = self._get_pool_location(snapshot['share'],
+                                                share_server)
         self._hpe3par.create_snapshot(snapshot['share']['project_id'],
                                       snapshot['share']['id'],
                                       snapshot['share']['share_proto'],
                                       snapshot['id'],
-                                      self.fpg,
-                                      self.vfs)
+                                      fpg,
+                                      vfs)

     def delete_snapshot(self, context, snapshot, share_server=None):
         """Deletes a snapshot of a share."""

+        fpg, vfs, ips = self._get_pool_location(snapshot['share'],
+                                                share_server)
         self._hpe3par.delete_snapshot(snapshot['share']['project_id'],
                                       snapshot['share']['id'],
                                       snapshot['share']['share_proto'],
                                       snapshot['id'],
-                                      self.fpg,
-                                      self.vfs)
+                                      fpg,
+                                      vfs)

     def ensure_share(self, context, share, share_server=None):
         pass
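With these changes create_share() and create_share_from_snapshot() return a list of export locations, one per VFS IP of the selected pool, instead of a single string. A sketch with illustrative values:

```python
ips = ['10.0.0.10', '10.0.0.11']
path = '/pool_a/vfs_a/fstore/osf-share-id'
export_locations = ['%s:%s' % (ip, path) for ip in ips]
# ['10.0.0.10:/pool_a/vfs_a/fstore/osf-share-id',
#  '10.0.0.11:/pool_a/vfs_a/fstore/osf-share-id']
```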
@@ -370,6 +571,7 @@ class HPE3ParShareDriver(driver.ShareDriver):
         if 'NFS' == share['share_proto']:  # Avoiding DB call otherwise
             extra_specs = share_types.get_extra_specs_from_share(share)

+        fpg, vfs, ips = self._get_pool_location(share, share_server)
         self._hpe3par.update_access(share['project_id'],
                                     share['id'],
                                     share['share_proto'],
@@ -377,28 +579,32 @@ class HPE3ParShareDriver(driver.ShareDriver):
                                     access_rules,
                                     add_rules,
                                     delete_rules,
-                                    self.fpg,
-                                    self.vfs)
+                                    fpg,
+                                    vfs)

     def extend_share(self, share, new_size, share_server=None):
         """Extends size of existing share."""

+        fpg, vfs, ips = self._get_pool_location(share, share_server)
         self._hpe3par.resize_share(share['project_id'],
                                    share['id'],
                                    share['share_proto'],
                                    new_size,
                                    share['size'],
-                                   self.fpg,
-                                   self.vfs)
+                                   fpg,
+                                   vfs)

     def shrink_share(self, share, new_size, share_server=None):
         """Shrinks size of existing share."""

+        fpg, vfs, ips = self._get_pool_location(share, share_server)
         self._hpe3par.resize_share(share['project_id'],
                                    share['id'],
                                    share['share_proto'],
                                    new_size,
                                    share['size'],
-                                   self.fpg,
-                                   self.vfs)
+                                   fpg,
+                                   vfs)

     def _update_share_stats(self):
         """Retrieve stats info from share group."""
@@ -434,8 +640,10 @@ class HPE3ParShareDriver(driver.ShareDriver):
                 _LI("Skipping capacity and capabilities update. Setup has not "
                     "completed."))
         else:
-            fpg_status = self._hpe3par.get_fpg_status(self.fpg)
-            LOG.debug("FPG status = %s.", fpg_status)
-            stats.update(fpg_status)
+            for fpg in self.fpgs:
+                fpg_status = self._hpe3par.get_fpg_status(fpg)
+                fpg_status['reserved_percentage'] = reserved_share_percentage
+                LOG.debug("FPG status = %s.", fpg_status)
+                stats.setdefault('pools', []).append(fpg_status)

         super(HPE3ParShareDriver, self)._update_share_stats(stats)
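Instead of merging one FPG's status into the top-level stats, the driver now reports a 'pools' list that the scheduler uses for pool-aware placement, one entry per configured FPG with 'pool_name' and 'reserved_percentage' set per pool. Numbers are illustrative:

```python
stats['pools'] = [
    {'pool_name': 'pool_a', 'total_capacity_gb': 1000,
     'free_capacity_gb': 400, 'thin_provisioning': True,
     'reserved_percentage': 0},
    {'pool_name': 'pool_b', 'total_capacity_gb': 2000,
     'free_capacity_gb': 1800, 'thin_provisioning': False,
     'reserved_percentage': 0},
]
```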
@@ -77,10 +77,11 @@ class HPE3ParMediator(object):
                 when a share is deleted #1582931
         2.0.6 - Read-write share from snapshot (using driver mount and copy)
         2.0.7 - Add update_access support
+        2.0.8 - Multi pools support per backend

     """

-    VERSION = "2.0.7"
+    VERSION = "2.0.8"

     def __init__(self, **kwargs):

@@ -95,7 +96,6 @@ class HPE3ParMediator(object):
         self.hpe3par_san_private_key = kwargs.get('hpe3par_san_private_key')
         self.hpe3par_fstore_per_share = kwargs.get('hpe3par_fstore_per_share')
         self.hpe3par_require_cifs_ip = kwargs.get('hpe3par_require_cifs_ip')
-        self.hpe3par_share_ip_address = kwargs.get('hpe3par_share_ip_address')
         self.hpe3par_cifs_admin_access_username = (
             kwargs.get('hpe3par_cifs_admin_access_username'))
         self.hpe3par_cifs_admin_access_password = (
@@ -204,9 +204,9 @@ class HPE3ParMediator(object):
             # don't raise exception on logout()

     @staticmethod
-    def build_export_location(protocol, ip, path):
+    def build_export_locations(protocol, ips, path):

-        if not ip:
+        if not ips:
             message = _('Failed to build export location due to missing IP.')
             raise exception.InvalidInput(reason=message)

@@ -216,11 +216,9 @@ class HPE3ParMediator(object):

         share_proto = HPE3ParMediator.ensure_supported_protocol(protocol)
         if share_proto == 'nfs':
-            location = ':'.join((ip, path))
+            return ['%s:%s' % (ip, path) for ip in ips]
         else:
-            location = r'\\%s\%s' % (ip, path)
-
-        return location
+            return [r'\\%s\%s' % (ip, path) for ip in ips]

     def get_provisioned_gb(self, fpg):
         total_mb = 0
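build_export_locations() now returns one location per IP, formatted per protocol. A short illustration with example values:

```python
HPE3ParMediator.build_export_locations('NFS', ['10.0.0.10', '10.0.0.11'],
                                       '/fpg/vfs/fstore')
# -> ['10.0.0.10:/fpg/vfs/fstore', '10.0.0.11:/fpg/vfs/fstore']

HPE3ParMediator.build_export_locations('CIFS', ['10.0.0.10'], 'share-name')
# -> [r'\\10.0.0.10\share-name']
```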
@@ -288,6 +286,7 @@ class HPE3ParMediator(object):
         hpe3par_flash_cache = flash_cache_policy == ENABLED

         status = {
+            'pool_name': fpg,
             'total_capacity_gb': total_capacity_gb,
             'free_capacity_gb': free_capacity_gb,
             'thin_provisioning': thin_provisioning,
@@ -310,7 +309,7 @@ class HPE3ParMediator(object):
             message = (_('Invalid protocol. Expected nfs or smb. Got %s.') %
                        protocol)
             LOG.error(message)
-            raise exception.InvalidInput(message)
+            raise exception.InvalidShareAccess(reason=message)
         return protocol

     @staticmethod
@@ -523,7 +522,7 @@ class HPE3ParMediator(object):
             msg = (_('Failed to create fstore %(fstore)s: %(e)s') %
                    {'fstore': fstore, 'e': six.text_type(e)})
             LOG.exception(msg)
-            raise exception.ShareBackendException(msg)
+            raise exception.ShareBackendException(msg=msg)

         if size:
             self._update_capacity_quotas(fstore, size, 0, fpg, vfs)
@@ -545,7 +544,7 @@ class HPE3ParMediator(object):
             msg = (_('Failed to create share %(share_name)s: %(e)s') %
                    {'share_name': share_name, 'e': six.text_type(e)})
             LOG.exception(msg)
-            raise exception.ShareBackendException(msg)
+            raise exception.ShareBackendException(msg=msg)

         try:
             result = self._client.getfshare(
@@ -558,14 +557,14 @@ class HPE3ParMediator(object):
                    '%(e)s') % {'share_name': share_name,
                                'e': six.text_type(e)})
             LOG.exception(msg)
-            raise exception.ShareBackendException(msg)
+            raise exception.ShareBackendException(msg=msg)

         if result['total'] != 1:
             msg = (_('Failed to get fshare %(share_name)s after creating it. '
                      'Expected to get 1 fshare. Got %(total)s.') %
                    {'share_name': share_name, 'total': result['total']})
             LOG.error(msg)
-            raise exception.ShareBackendException(msg)
+            raise exception.ShareBackendException(msg=msg)
         return result['members'][0]

     def create_share(self, project_id, share_id, share_proto, extra_specs,
@@ -616,7 +615,7 @@ class HPE3ParMediator(object):

     def create_share_from_snapshot(self, share_id, share_proto, extra_specs,
                                    orig_project_id, orig_share_id,
-                                   snapshot_id, fpg, vfs,
+                                   snapshot_id, fpg, vfs, ips,
                                    size=None,
                                    comment=OPEN_STACK_MANILA):

@@ -710,14 +709,13 @@ class HPE3ParMediator(object):
                 self._grant_admin_smb_access(
                     protocol, fpg, vfs, fstore, temp, share=ro_share_name)

-            ip = self.hpe3par_share_ip_address
-            source_location = self.build_export_location(
-                protocol, ip, source_path)
-            dest_location = self.build_export_location(
-                protocol, ip, dest_path)
+            source_locations = self.build_export_locations(
+                protocol, ips, source_path)
+            dest_locations = self.build_export_locations(
+                protocol, ips, dest_path)

             self._copy_share_data(
-                share_id, source_location, dest_location, protocol)
+                share_id, source_locations[0], dest_locations[0], protocol)

             # Revoke the admin access that was needed to copy to the dest.
             if protocol == 'nfs':
@@ -821,7 +819,7 @@ class HPE3ParMediator(object):
         return fstore

     def delete_share(self, project_id, share_id, share_size, share_proto,
-                     fpg, vfs):
+                     fpg, vfs, share_ip):

         protocol = self.ensure_supported_protocol(share_proto)
         share_name = self.ensure_prefix(share_id)
@@ -857,7 +855,7 @@ class HPE3ParMediator(object):
                 # reason, we will not treat this as an error_deleting
                 # issue. We will allow the delete to continue as requested.
                 self._delete_file_tree(
-                    share_name, protocol, fpg, vfs, fstore)
+                    share_name, protocol, fpg, vfs, fstore, share_ip)
                 # reduce the fsquota by share size when a tree is deleted.
                 self._update_capacity_quotas(
                     fstore, 0, share_size, fpg, vfs)
@@ -871,7 +869,8 @@ class HPE3ParMediator(object):
             }
             LOG.warning(msg, data)

-    def _delete_file_tree(self, share_name, protocol, fpg, vfs, fstore):
+    def _delete_file_tree(self, share_name, protocol, fpg, vfs, fstore,
+                          share_ip):
         # If the share protocol is CIFS, we need to make sure the admin
         # provided the proper config values. If they have not, we can simply
         # return out and log a warning.
@@ -893,7 +892,8 @@ class HPE3ParMediator(object):
         self._create_mount_directory(mount_location)

         # Mount the super share.
-        self._mount_super_share(protocol, mount_location, fpg, vfs, fstore)
+        self._mount_super_share(protocol, mount_location, fpg, vfs, fstore,
+                                share_ip)

         # Delete the share from the super share.
         self._delete_share_directory(share_dir)
@@ -995,10 +995,11 @@ class HPE3ParMediator(object):
                    '-o', cred)
             utils.execute(*cmd, run_as_root=True)

-    def _mount_super_share(self, protocol, mount_dir, fpg, vfs, fstore):
+    def _mount_super_share(self, protocol, mount_dir, fpg, vfs, fstore,
+                           share_ip):
         try:
             mount_location = self._generate_mount_path(
-                protocol, fpg, vfs, fstore)
+                protocol, fpg, vfs, fstore, share_ip)
             self._mount_share(protocol, mount_location, mount_dir)
         except Exception as err:
             message = (_LW("There was an error mounting the super share: "
@@ -1027,17 +1028,17 @@ class HPE3ParMediator(object):
                        six.text_type(err))
             LOG.warning(message)

-    def _generate_mount_path(self, protocol, fpg, vfs, fstore):
+    def _generate_mount_path(self, protocol, fpg, vfs, fstore, share_ip):
         path = None
         if protocol == 'nfs':
             path = (("%(share_ip)s:/%(fpg)s/%(vfs)s/%(fstore)s/") %
-                    {'share_ip': self.hpe3par_share_ip_address,
+                    {'share_ip': share_ip,
                      'fpg': fpg,
                      'vfs': vfs,
                      'fstore': fstore})
         else:
             path = (("//%(share_ip)s/%(share_name)s/") %
-                    {'share_ip': self.hpe3par_share_ip_address,
+                    {'share_ip': share_ip,
                      'share_name': SUPER_SHARE})
         return path

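The deletion helpers now thread the per-pool share_ip chosen by the driver all the way down to _generate_mount_path(), replacing the removed global hpe3par_share_ip_address. The resulting NFS super-share mount location looks like this (values illustrative):

```python
'%(share_ip)s:/%(fpg)s/%(vfs)s/%(fstore)s/' % {
    'share_ip': '10.0.0.10', 'fpg': 'pool_a',
    'vfs': 'vfs_a', 'fstore': 'osf-project-id'}
# -> '10.0.0.10:/pool_a/vfs_a/osf-project-id/'
```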
@@ -1053,7 +1054,7 @@ class HPE3ParMediator(object):
             msg = (_('Exception during getvfs %(vfs)s: %(e)s') %
                    {'vfs': vfs, 'e': six.text_type(e)})
             LOG.exception(msg)
-            raise exception.ShareBackendException(msg)
+            raise exception.ShareBackendException(msg=msg)

         if result['total'] != 1:
             error_msg = result.get('message')
@@ -1062,7 +1063,7 @@ class HPE3ParMediator(object):
                            '(%(fpg)s/%(vfs)s): %(msg)s') %
                            {'fpg': fpg, 'vfs': vfs, 'msg': error_msg})
                 LOG.error(message)
-                raise exception.ShareBackendException(message)
+                raise exception.ShareBackendException(msg=message)
             else:
                 message = (_('Error while validating FPG/VFS '
                              '(%(fpg)s/%(vfs)s): Expected 1, '
@@ -1071,7 +1072,7 @@ class HPE3ParMediator(object):
                             'total': result['total']})

                 LOG.error(message)
-                raise exception.ShareBackendException(message)
+                raise exception.ShareBackendException(msg=message)

         return result['members'][0]

@@ -1151,7 +1152,7 @@ class HPE3ParMediator(object):
                      'Cannot delete snapshot without checking for '
                      'dependent shares first: %s') % six.text_type(e))
             LOG.exception(msg)
-            raise exception.ShareBackendException(msg)
+            raise exception.ShareBackendException(msg=msg)

         for share in shares['members']:
             if protocol == 'nfs':
@@ -1189,7 +1190,7 @@ class HPE3ParMediator(object):
                        'snapname': snapname,
                        'e': six.text_type(e)})
             LOG.exception(msg)
-            raise exception.ShareBackendException(msg)
+            raise exception.ShareBackendException(msg=msg)

         # Try to reclaim the space
         try:
@@ -1207,13 +1208,13 @@ class HPE3ParMediator(object):
             msg = (_("Invalid access type. Expected 'ip' or 'user'. "
                      "Actual '%s'.") % access_type)
             LOG.error(msg)
-            raise exception.InvalidInput(msg)
+            raise exception.InvalidInput(reason=msg)

         if protocol == 'nfs' and access_type != 'ip':
             msg = (_("Invalid NFS access type. HPE 3PAR NFS supports 'ip'. "
                      "Actual '%s'.") % access_type)
             LOG.error(msg)
-            raise exception.HPE3ParInvalid(msg)
+            raise exception.HPE3ParInvalid(err=msg)

         return protocol

@@ -1496,7 +1497,7 @@ class HPE3ParMediator(object):
         except Exception as e:
             msg = (_('Unexpected exception while getting snapshots: %s') %
                    six.text_type(e))
-            raise exception.ShareBackendException(msg)
+            raise exception.ShareBackendException(msg=msg)

     def update_access(self, project_id, share_id, share_proto, extra_specs,
                       access_rules, add_rules, delete_rules, fpg, vfs):
@@ -479,19 +479,27 @@ class ShareManager(manager.SchedulerDependentManager):
                 with_share_data=True
             )
             if create_on_backend:
+                metadata = {'request_host': share_instance['host']}
                 compatible_share_server = (
                     self._create_share_server_in_backend(
-                        context, compatible_share_server))
+                        context, compatible_share_server,
+                        metadata=metadata))

             return compatible_share_server, share_instance_ref

         return _provide_share_server_for_share()

-    def _create_share_server_in_backend(self, context, share_server):
+    def _create_share_server_in_backend(self, context, share_server,
+                                        metadata=None):
+        """Perform setup_server on backend
+
+        :param metadata: A dictionary, to be passed to driver's setup_server()
+        """
+
         if share_server['status'] == constants.STATUS_CREATING:
             # Create share server on backend with data from db.
-            share_server = self._setup_server(context, share_server)
+            share_server = self._setup_server(context, share_server,
+                                              metadata=metadata)
             LOG.info(_LI("Share server created successfully."))
         else:
             LOG.info(_LI("Using preexisting share server: "
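On the manager side, the only metadata key passed today is the share instance host, so a driver setting up a new share server can tell which pool the scheduler picked (host string below is illustrative):

```python
metadata = {'request_host': 'hostname@backend#pool_a'}
# forwarded as: self._setup_server(context, share_server, metadata=metadata)
```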
@@ -40,6 +40,7 @@ EXPECTED_IP_127_2 = '127.0.0.2'
 EXPECTED_ACCESS_LEVEL = 'foo_access'
 EXPECTED_SUBNET = '255.255.255.0'  # based on CIDR_PREFIX above
 EXPECTED_VLAN_TYPE = 'vlan'
+EXPECTED_VXLAN_TYPE = 'vxlan'
 EXPECTED_VLAN_TAG = '101'
 EXPECTED_SERVER_ID = '1a1a1a1a-2b2b-3c3c-4d4d-5e5e5e5e5e5e'
 EXPECTED_PROJECT_ID = 'osf-nfs-project-id'
@@ -47,16 +48,25 @@ SHARE_ID = 'share-id'
 EXPECTED_SHARE_ID = 'osf-share-id'
 EXPECTED_SHARE_ID_RO = 'osf-ro-share-id'
 EXPECTED_SHARE_NAME = 'share-name'
-EXPECTED_HOST = 'hostname@backend#pool'
+EXPECTED_FPG = 'pool'
+EXPECTED_HOST = 'hostname@backend#' + EXPECTED_FPG
+UNEXPECTED_FPG = 'not_a_pool'
+UNEXPECTED_HOST = 'hostname@backend#' + UNEXPECTED_FPG
+HOST_WITHOUT_POOL_1 = 'hostname@backend'
+HOST_WITHOUT_POOL_2 = 'hostname@backend#'
 EXPECTED_SHARE_PATH = '/anyfpg/anyvfs/anyfstore'
 EXPECTED_SIZE_1 = 1
 EXPECTED_SIZE_2 = 2
 EXPECTED_SNAP_NAME = 'osf-snap-name'
 EXPECTED_SNAP_ID = 'osf-snap-id'
 EXPECTED_STATS = {'test': 'stats'}
-EXPECTED_FPG = 'FPG_1'
+EXPECTED_FPG_CONF = [{EXPECTED_FPG: [EXPECTED_IP_10203040]}]
 EXPECTED_FSTORE = EXPECTED_PROJECT_ID
 EXPECTED_VFS = 'test_vfs'
+EXPECTED_GET_VFS = {'vfsname': EXPECTED_VFS,
+                    'vfsip': {'address': EXPECTED_IP_10203040}}
+EXPECTED_FPG_MAP = {EXPECTED_FPG: {EXPECTED_VFS: [EXPECTED_IP_10203040]}}
+EXPECTED_SHARE_IP = '10.50.3.8'
 EXPECTED_HPE_DEBUG = True
 EXPECTED_COMMENT = "OpenStack Manila - foo-comment"
 EXPECTED_EXTRA_SPECS = {}
@@ -67,6 +77,14 @@ EXPECTED_SUPER_SHARE_COMMENT = ('OpenStack super share used to delete nested '
 EXPECTED_CIFS_DOMAIN = 'LOCAL_CLUSTER'
 EXPECTED_MOUNT_PATH = '/mnt/'

+SHARE_SERVER = {
+    'backend_details': {
+        'ip': EXPECTED_IP_10203040,
+        'fpg': EXPECTED_FPG,
+        'vfs': EXPECTED_VFS,
+    },
+}
+
 # Access rules. Allow for overwrites.
 ACCESS_RULE_NFS = {
     'access_type': IP,
@@ -148,12 +166,7 @@ NFS_SHARE_INFO = {
     'share_proto': NFS,
     'export_location': EXPECTED_LOCATION,
     'size': 1234,
-}
-
-ACCESS_INFO = {
-    'access_type': IP,
-    'access_to': EXPECTED_IP_1234,
-    'access_level': READ_WRITE,
+    'host': EXPECTED_HOST,
 }

 SNAPSHOT_INFO = {
@@ -164,6 +177,7 @@ SNAPSHOT_INFO = {
         'id': EXPECTED_SHARE_ID,
         'share_proto': NFS,
         'export_location': EXPECTED_LOCATION,
+        'host': EXPECTED_HOST,
     },
 }

@@ -12,6 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+from copy import deepcopy
 import sys

 import ddt
@@ -26,6 +27,43 @@ from manila import test
 from manila.tests.share.drivers.hpe import test_hpe_3par_constants as constants


+@ddt.ddt
+class HPE3ParDriverFPGTestCase(test.TestCase):
+
+    def setUp(self):
+        super(HPE3ParDriverFPGTestCase, self).setUp()
+
+    @ddt.data((-1, 4),
+              (0, 5),
+              (0, -1))
+    @ddt.unpack
+    def test_FPG_init_args_failure(self, min_ip, max_ip):
+        self.assertRaises(exception.HPE3ParInvalid,
+                          hpe3pardriver.FPG, min_ip, max_ip)
+
+    @ddt.data(('invalid_ip_fpg, 10.256.0.1', 0, 4),
+              (None, 0, 4),
+              (' ', 0, 4),
+              ('', 0, 4),
+              ('max_ip_fpg, 10.0.0.1, 10.0.0.2, 10.0.0.3, 10.0.0.4, 10.0.0.5',
+               0, 4),
+              ('min_1_ip_fpg', 1, 4))
+    @ddt.unpack
+    def test_FPG_type_failures(self, value, min_ip, max_ip):
+        fpg_type_obj = hpe3pardriver.FPG(min_ip=min_ip, max_ip=max_ip)
+        self.assertRaises(exception.HPE3ParInvalid, fpg_type_obj, value)
+
+    @ddt.data(('samplefpg, 10.0.0.1', {'samplefpg': ['10.0.0.1']}),
+              ('samplefpg', {'samplefpg': []}),
+              ('samplefpg, 10.0.0.1, 10.0.0.2',
+               {'samplefpg': ['10.0.0.1', '10.0.0.2']}))
+    @ddt.unpack
+    def test_FPG_type_success(self, value, expected_fpg):
+        fpg_type_obj = hpe3pardriver.FPG()
+        fpg = fpg_type_obj(value)
+        self.assertEqual(expected_fpg, fpg)
+
+
 @ddt.ddt
 class HPE3ParDriverTestCase(test.TestCase):

@@ -42,13 +80,11 @@ class HPE3ParDriverTestCase(test.TestCase):
         self.conf.hpe3par_san_login = constants.SAN_LOGIN
         self.conf.hpe3par_san_password = constants.SAN_PASSWORD
         self.conf.hpe3par_san_ip = constants.EXPECTED_IP_1234
-        self.conf.hpe3par_fpg = constants.EXPECTED_FPG
+        self.conf.hpe3par_fpg = constants.EXPECTED_FPG_CONF
         self.conf.hpe3par_san_ssh_port = constants.PORT
         self.conf.ssh_conn_timeout = constants.TIMEOUT
-        self.conf.hpe3par_share_ip_address = None
         self.conf.hpe3par_fstore_per_share = False
         self.conf.hpe3par_require_cifs_ip = False
-        self.conf.hpe3par_share_ip_address = constants.EXPECTED_IP_10203040
         self.conf.hpe3par_cifs_admin_access_username = constants.USERNAME,
         self.conf.hpe3par_cifs_admin_access_password = constants.PASSWORD,
         self.conf.hpe3par_cifs_admin_access_domain = (
@@ -75,8 +111,8 @@ class HPE3ParDriverTestCase(test.TestCase):
         # restore needed static methods
         self.mock_mediator.ensure_supported_protocol = (
             self.real_hpe_3par_mediator.ensure_supported_protocol)
-        self.mock_mediator.build_export_location = (
-            self.real_hpe_3par_mediator.build_export_location)
+        self.mock_mediator.build_export_locations = (
+            self.real_hpe_3par_mediator.build_export_locations)

         self.driver = hpe3pardriver.HPE3ParShareDriver(
             configuration=self.conf)
@@ -84,7 +120,7 @@ class HPE3ParDriverTestCase(test.TestCase):
     def test_driver_setup_success(self):
         """Driver do_setup without any errors."""

-        self.mock_mediator.get_vfs_name.return_value = constants.EXPECTED_VFS
+        self.mock_mediator.get_vfs.return_value = constants.EXPECTED_GET_VFS

         self.driver.do_setup(None)
         conf = self.conf
@@ -99,8 +135,6 @@ class HPE3ParDriverTestCase(test.TestCase):
             hpe3par_san_ip=conf.hpe3par_san_ip,
             hpe3par_fstore_per_share=conf.hpe3par_fstore_per_share,
             hpe3par_require_cifs_ip=conf.hpe3par_require_cifs_ip,
-            hpe3par_share_ip_address=(
-                self.conf.hpe3par_share_ip_address),
             hpe3par_cifs_admin_access_username=(
                 conf.hpe3par_cifs_admin_access_username),
             hpe3par_cifs_admin_access_password=(
@@ -113,27 +147,55 @@ class HPE3ParDriverTestCase(test.TestCase):

         self.mock_mediator.assert_has_calls([
             mock.call.do_setup(),
-            mock.call.get_vfs_name(conf.hpe3par_fpg)])
+            mock.call.get_vfs(constants.EXPECTED_FPG)])

-        self.assertEqual(constants.EXPECTED_VFS, self.driver.vfs)
+    def test_driver_setup_dhss_success(self):
+        """Driver do_setup without any errors with dhss=True."""
+
+        self.test_driver_setup_success()
+        self.assertEqual(constants.EXPECTED_FPG_MAP, self.driver.fpgs)

     def test_driver_setup_no_dhss_success(self):
         """Driver do_setup without any errors with dhss=False."""

         self.conf.driver_handles_share_servers = False
-        self.conf.hpe3par_share_ip_address = constants.EXPECTED_IP_10203040
+        self.test_driver_setup_success()
+        self.assertEqual(constants.EXPECTED_FPG_MAP, self.driver.fpgs)
+
+    def test_driver_setup_success_no_dhss_no_conf_ss_ip(self):
+        """test driver's do_setup()
+
+        Driver do_setup with dhss=False, share server ip not set in config file
+        but discoverable at 3par array
+        """
+
+        self.conf.driver_handles_share_servers = False
+        # ss ip not provided in conf
+        original_fpg = deepcopy(self.conf.hpe3par_fpg)
+        self.conf.hpe3par_fpg[0][constants.EXPECTED_FPG] = []

         self.test_driver_setup_success()

-    def test_driver_setup_no_ss_no_ip(self):
+        self.assertEqual(constants.EXPECTED_FPG_MAP, self.driver.fpgs)
+        self.conf.hpe3par_fpg = original_fpg
+
+    def test_driver_setup_failure_no_dhss_no_conf_ss_ip(self):
         """Configured IP address is required for dhss=False."""

         self.conf.driver_handles_share_servers = False
-        self.conf.hpe3par_share_ip_address = None
+        # ss ip not provided in conf
+        fpg_without_ss_ip = deepcopy(self.conf.hpe3par_fpg)
+        self.conf.hpe3par_fpg[0][constants.EXPECTED_FPG] = []
+        # ss ip not configured on array
+        vfs_without_ss_ip = deepcopy(constants.EXPECTED_GET_VFS)
+        vfs_without_ss_ip['vfsip']['address'] = []
+        self.mock_mediator.get_vfs.return_value = vfs_without_ss_ip

         self.assertRaises(exception.HPE3ParInvalid,
                           self.driver.do_setup, None)
+        self.conf.hpe3par_fpg = fpg_without_ss_ip

-    def test_driver_with_setup_error(self):
+    def test_driver_setup_mediator_error(self):
         """Driver do_setup when the mediator setup fails."""

         self.mock_mediator.do_setup.side_effect = (
@@ -154,8 +216,6 @@ class HPE3ParDriverTestCase(test.TestCase):
             hpe3par_san_ip=conf.hpe3par_san_ip,
             hpe3par_fstore_per_share=conf.hpe3par_fstore_per_share,
             hpe3par_require_cifs_ip=conf.hpe3par_require_cifs_ip,
-            hpe3par_share_ip_address=(
-                self.conf.hpe3par_share_ip_address),
             hpe3par_cifs_admin_access_username=(
                 conf.hpe3par_cifs_admin_access_username),
             hpe3par_cifs_admin_access_password=(
@@ -168,10 +228,10 @@ class HPE3ParDriverTestCase(test.TestCase):

         self.mock_mediator.assert_has_calls([mock.call.do_setup()])

-    def test_driver_with_vfs_error(self):
-        """Driver do_setup when the get_vfs_name fails."""
+    def test_driver_setup_with_vfs_error(self):
+        """Driver do_setup when the get_vfs fails."""

-        self.mock_mediator.get_vfs_name.side_effect = (
+        self.mock_mediator.get_vfs.side_effect = (
             exception.ShareBackendException('fail'))

         self.assertRaises(exception.ShareBackendException,
@@ -189,8 +249,6 @@ class HPE3ParDriverTestCase(test.TestCase):
             hpe3par_san_ip=conf.hpe3par_san_ip,
             hpe3par_fstore_per_share=conf.hpe3par_fstore_per_share,
             hpe3par_require_cifs_ip=conf.hpe3par_require_cifs_ip,
-            hpe3par_share_ip_address=(
-                self.conf.hpe3par_share_ip_address),
             hpe3par_cifs_admin_access_username=(
                 conf.hpe3par_cifs_admin_access_username),
             hpe3par_cifs_admin_access_password=(
@@ -203,64 +261,53 @@ class HPE3ParDriverTestCase(test.TestCase):

         self.mock_mediator.assert_has_calls([
             mock.call.do_setup(),
-            mock.call.get_vfs_name(conf.hpe3par_fpg)])
+            mock.call.get_vfs(constants.EXPECTED_FPG)])

+    def test_driver_setup_conf_ips_validation_fails(self):
+        """Driver do_setup when the _validate_pool_ips fails."""
+
+        self.conf.driver_handles_share_servers = False
||||||
|
vfs_with_ss_ip = deepcopy(constants.EXPECTED_GET_VFS)
|
||||||
|
vfs_with_ss_ip['vfsip']['address'] = ['10.100.100.100']
|
||||||
|
self.mock_mediator.get_vfs.return_value = vfs_with_ss_ip
|
||||||
|
self.assertRaises(exception.HPE3ParInvalid,
|
||||||
|
self.driver.do_setup, None)
|
||||||
|
|
||||||
|
conf = self.conf
|
||||||
|
self.mock_mediator_constructor.assert_has_calls([
|
||||||
|
mock.call(hpe3par_san_ssh_port=conf.hpe3par_san_ssh_port,
|
||||||
|
hpe3par_san_password=conf.hpe3par_san_password,
|
||||||
|
hpe3par_username=conf.hpe3par_username,
|
||||||
|
hpe3par_san_login=conf.hpe3par_san_login,
|
||||||
|
hpe3par_debug=conf.hpe3par_debug,
|
||||||
|
hpe3par_api_url=conf.hpe3par_api_url,
|
||||||
|
hpe3par_password=conf.hpe3par_password,
|
||||||
|
hpe3par_san_ip=conf.hpe3par_san_ip,
|
||||||
|
hpe3par_fstore_per_share=conf.hpe3par_fstore_per_share,
|
||||||
|
hpe3par_require_cifs_ip=conf.hpe3par_require_cifs_ip,
|
||||||
|
hpe3par_cifs_admin_access_username=(
|
||||||
|
conf.hpe3par_cifs_admin_access_username),
|
||||||
|
hpe3par_cifs_admin_access_password=(
|
||||||
|
conf.hpe3par_cifs_admin_access_password),
|
||||||
|
hpe3par_cifs_admin_access_domain=(
|
||||||
|
conf.hpe3par_cifs_admin_access_domain),
|
||||||
|
hpe3par_share_mount_path=conf.hpe3par_share_mount_path,
|
||||||
|
my_ip=self.conf.my_ip,
|
||||||
|
ssh_conn_timeout=conf.ssh_conn_timeout)])
|
||||||
|
|
||||||
|
self.mock_mediator.assert_has_calls([
|
||||||
|
mock.call.do_setup(),
|
||||||
|
mock.call.get_vfs(constants.EXPECTED_FPG)])
|
||||||
|
|
||||||
def init_driver(self):
|
def init_driver(self):
|
||||||
"""Simple driver setup for re-use with tests that need one."""
|
"""Simple driver setup for re-use with tests that need one."""
|
||||||
|
|
||||||
self.driver._hpe3par = self.mock_mediator
|
self.driver._hpe3par = self.mock_mediator
|
||||||
self.driver.vfs = constants.EXPECTED_VFS
|
self.driver.fpgs = constants.EXPECTED_FPG_MAP
|
||||||
self.driver.fpg = constants.EXPECTED_FPG
|
|
||||||
self.mock_object(hpe3pardriver, 'share_types')
|
self.mock_object(hpe3pardriver, 'share_types')
|
||||||
get_extra_specs = hpe3pardriver.share_types.get_extra_specs_from_share
|
get_extra_specs = hpe3pardriver.share_types.get_extra_specs_from_share
|
||||||
get_extra_specs.return_value = constants.EXPECTED_EXTRA_SPECS
|
get_extra_specs.return_value = constants.EXPECTED_EXTRA_SPECS
|
||||||
|
|
||||||
def do_create_share(self, protocol, share_type_id, expected_project_id,
|
|
||||||
expected_share_id, expected_size):
|
|
||||||
"""Re-usable code for create share."""
|
|
||||||
context = None
|
|
||||||
share_server = {
|
|
||||||
'backend_details': {'ip': constants.EXPECTED_IP_10203040}}
|
|
||||||
share = {
|
|
||||||
'display_name': constants.EXPECTED_SHARE_NAME,
|
|
||||||
'host': constants.EXPECTED_HOST,
|
|
||||||
'project_id': expected_project_id,
|
|
||||||
'id': expected_share_id,
|
|
||||||
'share_proto': protocol,
|
|
||||||
'share_type_id': share_type_id,
|
|
||||||
'size': expected_size,
|
|
||||||
}
|
|
||||||
location = self.driver.create_share(context, share, share_server)
|
|
||||||
return location
|
|
||||||
|
|
||||||
def do_create_share_from_snapshot(self,
|
|
||||||
protocol,
|
|
||||||
share_type_id,
|
|
||||||
snapshot_instance,
|
|
||||||
expected_share_id,
|
|
||||||
expected_size):
|
|
||||||
"""Re-usable code for create share from snapshot."""
|
|
||||||
context = None
|
|
||||||
share_server = {
|
|
||||||
'backend_details': {
|
|
||||||
'ip': constants.EXPECTED_IP_10203040,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
share = {
|
|
||||||
'project_id': constants.EXPECTED_PROJECT_ID,
|
|
||||||
'display_name': constants.EXPECTED_SHARE_NAME,
|
|
||||||
'host': constants.EXPECTED_HOST,
|
|
||||||
'id': expected_share_id,
|
|
||||||
'share_proto': protocol,
|
|
||||||
'share_type_id': share_type_id,
|
|
||||||
'size': expected_size,
|
|
||||||
}
|
|
||||||
location = self.driver.create_share_from_snapshot(context,
|
|
||||||
share,
|
|
||||||
snapshot_instance,
|
|
||||||
share_server)
|
|
||||||
return location
|
|
||||||
|
|
||||||
def test_driver_check_for_setup_error_success(self):
|
def test_driver_check_for_setup_error_success(self):
|
||||||
"""check_for_setup_error when things go well."""
|
"""check_for_setup_error when things go well."""
|
||||||
|
|
||||||
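The setup tests above poke at the pool configuration as self.conf.hpe3par_fpg[0][constants.EXPECTED_FPG] and at the driver state as self.driver.fpgs[fpg][vfs], so both are evidently nested pool-to-IP mappings. A minimal sketch of the shapes those accesses imply is shown below; the pool, VFS, and address literals are invented for illustration and are not the real test constants.

    # Illustration only: placeholder names, not the actual constants module.
    # One configured pool entry maps an FPG name to its share IP addresses.
    hpe3par_fpg = [{'samplepool': ['10.10.10.10']}]

    # The map the driver builds during do_setup(): FPG name -> VFS -> IPs.
    fpgs = {'samplepool': {'samplevfs': ['10.10.10.10']}}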
@@ -289,6 +336,98 @@ class HPE3ParDriverTestCase(test.TestCase):
        ]
        hpe3pardriver.LOG.assert_has_calls(expected_calls)

+   @ddt.data(([constants.SHARE_SERVER], constants.SHARE_SERVER),
+             ([], None),)
+   @ddt.unpack
+   def test_choose_share_server_compatible_with_share(self, share_servers,
+                                                      expected_share_sever):
+       context = None
+       share_server = self.driver.choose_share_server_compatible_with_share(
+           context,
+           share_servers,
+           constants.NFS_SHARE_INFO,
+           None,
+           None)
+
+       self.assertEqual(expected_share_sever, share_server)
+
+   def test_choose_share_server_compatible_with_share_with_cg(self):
+       context = None
+       cg_ref = {'id': 'dummy'}
+       self.assertRaises(
+           exception.InvalidRequest,
+           self.driver.choose_share_server_compatible_with_share,
+           context,
+           [constants.SHARE_SERVER],
+           constants.NFS_SHARE_INFO,
+           None,
+           cg_ref)
+
+   def do_create_share(self, protocol, share_type_id, expected_project_id,
+                       expected_share_id, expected_size):
+       """Re-usable code for create share."""
+       context = None
+
+       share = {
+           'display_name': constants.EXPECTED_SHARE_NAME,
+           'host': constants.EXPECTED_HOST,
+           'project_id': expected_project_id,
+           'id': expected_share_id,
+           'share_proto': protocol,
+           'share_type_id': share_type_id,
+           'size': expected_size,
+       }
+       location = self.driver.create_share(context, share,
+                                           constants.SHARE_SERVER)
+       return location
+
+   def do_create_share_from_snapshot(self,
+                                     protocol,
+                                     share_type_id,
+                                     snapshot_instance,
+                                     expected_share_id,
+                                     expected_size):
+       """Re-usable code for create share from snapshot."""
+       context = None
+       share = {
+           'project_id': constants.EXPECTED_PROJECT_ID,
+           'display_name': constants.EXPECTED_SHARE_NAME,
+           'host': constants.EXPECTED_HOST,
+           'id': expected_share_id,
+           'share_proto': protocol,
+           'share_type_id': share_type_id,
+           'size': expected_size,
+       }
+       location = self.driver.create_share_from_snapshot(
+           context,
+           share,
+           snapshot_instance,
+           constants.SHARE_SERVER)
+       return location
+
+   @ddt.data((constants.UNEXPECTED_HOST, exception.InvalidHost),
+             (constants.HOST_WITHOUT_POOL_1, exception.InvalidHost),
+             (constants.HOST_WITHOUT_POOL_2, exception.InvalidHost))
+   @ddt.unpack
+   def test_driver_create_share_fails_get_pool_location(self, host,
+                                                        expected_exception):
+       """get_pool_location fails to extract pool name from host"""
+       self.init_driver()
+       context = None
+       share_server = None
+       share = {
+           'display_name': constants.EXPECTED_SHARE_NAME,
+           'host': host,
+           'project_id': constants.EXPECTED_PROJECT_ID,
+           'id': constants.EXPECTED_SHARE_ID,
+           'share_proto': constants.CIFS,
+           'share_type_id': constants.SHARE_TYPE_ID,
+           'size': constants.EXPECTED_SIZE_2,
+       }
+       self.assertRaises(expected_exception,
+                         self.driver.create_share,
+                         context, share, share_server)
+
    def test_driver_create_cifs_share(self):
        self.init_driver()

@@ -306,7 +445,7 @@ class HPE3ParDriverTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SIZE_2)

-       self.assertEqual(expected_location, location)
+       self.assertIn(expected_location, location)
        expected_calls = [mock.call.create_share(
            constants.EXPECTED_PROJECT_ID,
            constants.EXPECTED_SHARE_ID,
@@ -334,7 +473,7 @@ class HPE3ParDriverTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SIZE_1)

-       self.assertEqual(expected_location, location)
+       self.assertIn(expected_location, location)
        expected_calls = [
            mock.call.create_share(constants.EXPECTED_PROJECT_ID,
                                   constants.EXPECTED_SHARE_ID,
@@ -367,7 +506,7 @@ class HPE3ParDriverTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SIZE_2)

-       self.assertEqual(expected_location, location)
+       self.assertIn(expected_location, location)
        expected_calls = [
            mock.call.create_share_from_snapshot(
                constants.EXPECTED_SHARE_ID,
@@ -378,6 +517,7 @@ class HPE3ParDriverTestCase(test.TestCase):
                constants.EXPECTED_SNAP_ID,
                constants.EXPECTED_FPG,
                constants.EXPECTED_VFS,
+               [constants.EXPECTED_IP_10203040],
                comment=mock.ANY,
                size=constants.EXPECTED_SIZE_2),
        ]
@@ -400,7 +540,7 @@ class HPE3ParDriverTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SIZE_1)

-       self.assertEqual(expected_location, location)
+       self.assertIn(expected_location, location)
        expected_calls = [
            mock.call.create_share_from_snapshot(
                constants.EXPECTED_SHARE_ID,
@@ -411,6 +551,7 @@ class HPE3ParDriverTestCase(test.TestCase):
                constants.EXPECTED_SNAP_ID,
                constants.EXPECTED_FPG,
                constants.EXPECTED_VFS,
+               [constants.EXPECTED_IP_10203040],
                comment=mock.ANY,
                size=constants.EXPECTED_SIZE_1),
        ]
@@ -427,6 +568,7 @@ class HPE3ParDriverTestCase(test.TestCase):
            'id': constants.EXPECTED_SHARE_ID,
            'share_proto': constants.CIFS,
            'size': constants.EXPECTED_SIZE_1,
+           'host': constants.EXPECTED_HOST
        }

        self.driver.delete_share(context, share, share_server)
@@ -437,7 +579,8 @@ class HPE3ParDriverTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.CIFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)]
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)]

        self.mock_mediator.assert_has_calls(expected_calls)

@@ -488,7 +631,7 @@ class HPE3ParDriverTestCase(test.TestCase):
            [constants.ACCESS_RULE_NFS],
            [constants.ADD_RULE_IP],
            [],
-           constants.ACCESS_INFO)
+           constants.SHARE_SERVER)

        expected_calls = [
            mock.call.update_access(constants.EXPECTED_PROJECT_ID,
@@ -513,7 +656,7 @@ class HPE3ParDriverTestCase(test.TestCase):
            [constants.ACCESS_RULE_NFS],
            [],
            [constants.DELETE_RULE_IP],
-           constants.ACCESS_INFO)
+           constants.SHARE_SERVER)

        expected_calls = [
            mock.call.update_access(constants.EXPECTED_PROJECT_ID,
@@ -534,7 +677,9 @@ class HPE3ParDriverTestCase(test.TestCase):
        old_size = constants.NFS_SHARE_INFO['size']
        new_size = old_size * 2

-       self.driver.extend_share(constants.NFS_SHARE_INFO, new_size)
+       share_server = None
+       self.driver.extend_share(constants.NFS_SHARE_INFO,
+                                new_size, share_server)

        self.mock_mediator.resize_share.assert_called_once_with(
            constants.EXPECTED_PROJECT_ID,
@@ -550,8 +695,9 @@ class HPE3ParDriverTestCase(test.TestCase):

        old_size = constants.NFS_SHARE_INFO['size']
        new_size = old_size / 2
-       self.driver.shrink_share(constants.NFS_SHARE_INFO, new_size)
+       share_server = None
+       self.driver.shrink_share(constants.NFS_SHARE_INFO,
+                                new_size, share_server)

        self.mock_mediator.resize_share.assert_called_once_with(
            constants.EXPECTED_PROJECT_ID,
@@ -616,31 +762,40 @@ class HPE3ParDriverTestCase(test.TestCase):
        expected_version = self.driver.VERSION

        self.mock_mediator.get_fpg_status.return_value = {
-           'free_capacity_gb': expected_free,
+           'pool_name': constants.EXPECTED_FPG,
            'total_capacity_gb': expected_capacity,
+           'free_capacity_gb': expected_free,
            'thin_provisioning': True,
            'dedupe': False,
            'hpe3par_flash_cache': False,
            'hp3par_flash_cache': False,
+           'reserved_percentage': 0,
+           'provisioned_capacity_gb': expected_capacity
        }

        expected_result = {
-           'driver_handles_share_servers': True,
-           'qos': False,
+           'share_backend_name': 'HPE_3PAR',
+           'vendor_name': 'HPE',
            'driver_version': expected_version,
-           'free_capacity_gb': expected_free,
-           'max_over_subscription_ratio': None,
-           'pools': None,
+           'storage_protocol': 'NFS_CIFS',
+           'driver_handles_share_servers': True,
+           'total_capacity_gb': 0,
+           'free_capacity_gb': 0,
            'provisioned_capacity_gb': 0,
            'reserved_percentage': 0,
-           'share_backend_name': 'HPE_3PAR',
-           'storage_protocol': 'NFS_CIFS',
-           'total_capacity_gb': expected_capacity,
-           'vendor_name': 'HPE',
+           'max_over_subscription_ratio': None,
+           'qos': False,
            'thin_provisioning': True,
-           'dedupe': False,
-           'hpe3par_flash_cache': False,
-           'hp3par_flash_cache': False,
+           'pools': [{
+               'pool_name': constants.EXPECTED_FPG,
+               'total_capacity_gb': expected_capacity,
+               'free_capacity_gb': expected_free,
+               'thin_provisioning': True,
+               'dedupe': False,
+               'hpe3par_flash_cache': False,
+               'hp3par_flash_cache': False,
+               'reserved_percentage': 0,
+               'provisioned_capacity_gb': expected_capacity}],
            'snapshot_support': True,
            'replication_domain': None,
            'filter_function': None,
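With pool support the capacity numbers move out of the backend-level keys and into one entry per FPG under 'pools', which is what the reshaped expected_result above asserts. The sketch below shows that reporting pattern in isolation; the helper name and loop are assumptions for illustration, not the driver's actual implementation.

    # Illustration only: fold per-pool status dicts into the stats the
    # scheduler consumes; backend-level totals stay at zero.
    def build_stats(driver_version, pool_statuses):
        stats = {
            'share_backend_name': 'HPE_3PAR',
            'vendor_name': 'HPE',
            'driver_version': driver_version,
            'storage_protocol': 'NFS_CIFS',
            'driver_handles_share_servers': True,
            'total_capacity_gb': 0,
            'free_capacity_gb': 0,
            'pools': [],
        }
        for pool_status in pool_statuses:
            # One dict per configured FPG, e.g. as returned by get_fpg_status().
            stats['pools'].append(dict(pool_status))
        return stats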
@@ -745,8 +900,8 @@ class HPE3ParDriverTestCase(test.TestCase):
            'fpg': constants.EXPECTED_FPG,
            'vfs': constants.EXPECTED_VFS,
        }
-       result = self.driver._setup_server(network_info)
+       metadata = {'request_host': constants.EXPECTED_HOST}
+       result = self.driver._setup_server(network_info, metadata)

        expected_calls = [
            mock.call.create_fsip(constants.EXPECTED_IP_1234,
@@ -759,12 +914,60 @@ class HPE3ParDriverTestCase(test.TestCase):

        self.assertEqual(expected_result, result)

+   def test_setup_server_fails_for_unsupported_network_type(self):
+       """Setup server fails for unsupported network type"""
+
+       self.init_driver()
+
+       network_info = {
+           'network_allocations': [
+               {'ip_address': constants.EXPECTED_IP_1234}],
+           'cidr': '/'.join((constants.EXPECTED_IP_1234,
+                             constants.CIDR_PREFIX)),
+           'network_type': constants.EXPECTED_VXLAN_TYPE,
+           'segmentation_id': constants.EXPECTED_VLAN_TAG,
+           'server_id': constants.EXPECTED_SERVER_ID,
+       }
+       metadata = {'request_host': constants.EXPECTED_HOST}
+
+       self.assertRaises(exception.NetworkBadConfigurationException,
+                         self.driver._setup_server,
+                         network_info, metadata)
+
+   def test_setup_server_fails_for_exceed_pool_max_supported_ips(self):
+       """Setup server fails when the VFS has reached max supported IPs"""
+
+       self.init_driver()
+
+       network_info = {
+           'network_allocations': [
+               {'ip_address': constants.EXPECTED_IP_1234}],
+           'cidr': '/'.join((constants.EXPECTED_IP_1234,
+                             constants.CIDR_PREFIX)),
+           'network_type': constants.EXPECTED_VLAN_TYPE,
+           'segmentation_id': constants.EXPECTED_VLAN_TAG,
+           'server_id': constants.EXPECTED_SERVER_ID,
+       }
+       metadata = {'request_host': constants.EXPECTED_HOST}
+
+       expected_vfs = self.driver.fpgs[
+           constants.EXPECTED_FPG][constants.EXPECTED_VFS]
+       self.driver.fpgs[constants.EXPECTED_FPG][constants.EXPECTED_VFS] = [
+           '10.0.0.1', '10.0.0.2', '10.0.0.3', '10.0.0.4']
+
+       self.assertRaises(exception.Invalid,
+                         self.driver._setup_server,
+                         network_info, metadata)
+       self.driver.fpgs[constants.EXPECTED_FPG][constants.EXPECTED_VFS
+                        ] = expected_vfs
+
    def test_teardown_server(self):
+       """Test tear down server"""
+
        self.init_driver()

        server_details = {
-           'ip': constants.EXPECTED_IP_1234,
+           'ip': constants.EXPECTED_IP_10203040,
            'fpg': constants.EXPECTED_FPG,
            'vfs': constants.EXPECTED_VFS,
        }
@@ -772,7 +975,7 @@ class HPE3ParDriverTestCase(test.TestCase):
        self.driver._teardown_server(server_details)

        expected_calls = [
-           mock.call.remove_fsip(constants.EXPECTED_IP_1234,
+           mock.call.remove_fsip(constants.EXPECTED_IP_10203040,
                                  constants.EXPECTED_FPG,
                                  constants.EXPECTED_VFS)
        ]
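The _setup_server(network_info, metadata) calls above receive the scheduler-chosen host in metadata['request_host'], and the HOST_WITHOUT_POOL cases show that the pool part of that host string is now required. A minimal sketch of pulling the pool (FPG) name out of such a host is given below; it assumes manila.share.utils.extract_host behaves as it does for other Manila drivers, and it is an illustration rather than the exact driver code.

    from manila.share import utils as share_utils

    def get_pool_name(request_host):
        # 'host@backend#pool' -> 'pool'; returns None when the pool part is
        # missing, which the tests above expect to surface as InvalidHost.
        return share_utils.extract_host(request_host, level='pool')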
@@ -79,7 +79,6 @@ class HPE3ParMediatorTestCase(test.TestCase):
            hpe3par_san_login=constants.SAN_LOGIN,
            hpe3par_san_password=constants.SAN_PASSWORD,
            hpe3par_san_ssh_port=constants.PORT,
-           hpe3par_share_ip_address=constants.EXPECTED_IP_10203040,
            hpe3par_cifs_admin_access_username=constants.USERNAME,
            hpe3par_cifs_admin_access_password=constants.PASSWORD,
            hpe3par_cifs_admin_access_domain=constants.EXPECTED_CIFS_DOMAIN,
@@ -506,7 +505,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SNAP_ID,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           [constants.EXPECTED_IP_10203040])

        self.assertEqual(constants.EXPECTED_SHARE_ID, location)

@@ -603,6 +603,7 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SNAP_ID,
            constants.EXPECTED_FPG,
            constants.EXPECTED_VFS,
+           [constants.EXPECTED_IP_10203040],
            comment=constants.EXPECTED_COMMENT)

        self.assertEqual(constants.EXPECTED_SHARE_ID, location)
@@ -642,7 +643,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SNAP_ID,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           [constants.EXPECTED_IP_10203040])

        self.assertEqual(constants.EXPECTED_SHARE_PATH, location)

@@ -730,7 +732,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SNAP_ID,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           [constants.EXPECTED_IP_10203040])
        self.assertTrue(mock_bad_copy.run.called)
        self.assertTrue(mock_bad_copy.get_progress.called)

@@ -758,7 +761,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SNAP_ID,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           [constants.EXPECTED_IP_10203040])
        self.assertTrue(mock_bad_copy.run.called)

    def test_mediator_create_share_from_snap_not_found(self):
@@ -779,7 +783,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SHARE_ID,
            constants.EXPECTED_SNAP_ID,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           [constants.EXPECTED_IP_10203040])

    def test_mediator_delete_nfs_share(self):
        self.init_mediator()
@@ -800,7 +805,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.NFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_SHARE_IP)

        expected_calls = [
            mock.call.removefshare(constants.NFS_LOWER,
@@ -836,7 +842,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.CIFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)

        self.assertFalse(self.mock_client.removefshare.called)

@@ -858,7 +865,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.NFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)

        self.mock_client.removefshare.assert_called_once_with(
            constants.NFS_LOWER,
@@ -883,7 +891,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.CIFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)

        expected_calls = [
            mock.call.removefshare(constants.SMB_LOWER,
@@ -912,7 +921,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.CIFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)

        expected_calls = [
            mock.call.removefshare(constants.SMB_LOWER,
@@ -950,7 +960,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.CIFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)

        expected_calls = [
            mock.call.removefshare(constants.SMB_LOWER,
@@ -997,7 +1008,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.CIFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)

        expected_calls = [
            mock.call.removefshare(constants.SMB_LOWER,
@@ -1028,6 +1040,7 @@ class HPE3ParMediatorTestCase(test.TestCase):

        expected_mount_path = constants.EXPECTED_MOUNT_PATH + (
            constants.EXPECTED_SHARE_ID)

        expected_share_path = '/'.join((expected_mount_path,
                                        constants.EXPECTED_SHARE_ID))
        self.mediator._create_mount_directory.assert_called_once_with(
@@ -1037,7 +1050,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            expected_mount_path,
            constants.EXPECTED_FPG,
            constants.EXPECTED_VFS,
-           constants.EXPECTED_FSTORE)
+           constants.EXPECTED_FSTORE,
+           constants.EXPECTED_IP_10203040)
        self.mediator._delete_share_directory.assert_has_calls([
            mock.call(expected_share_path),
            mock.call(expected_mount_path),
@@ -1065,7 +1079,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.CIFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)

        expected_calls = [
            mock.call.removefshare(constants.SMB_LOWER,
@@ -1111,7 +1126,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_SIZE_1,
            constants.CIFS,
            constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS)
+           constants.EXPECTED_VFS,
+           constants.EXPECTED_IP_10203040)

        expected_calls = [
            mock.call.removefshare(constants.SMB_LOWER,
@@ -1153,7 +1169,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            expected_mount_path)
        self.mediator._mount_super_share.assert_called_with(
            constants.SMB_LOWER, expected_mount_path, constants.EXPECTED_FPG,
-           constants.EXPECTED_VFS, constants.EXPECTED_FSTORE)
+           constants.EXPECTED_VFS, constants.EXPECTED_FSTORE,
+           constants.EXPECTED_IP_10203040)
        self.mediator._delete_share_directory.assert_called_with(
            expected_mount_path)
        self.mediator._unmount_share.assert_called_with(
@@ -1508,6 +1525,7 @@ class HPE3ParMediatorTestCase(test.TestCase):
            'provisioningType': hpe3parmediator.DEDUPE}

        expected_result = {
+           'pool_name': constants.EXPECTED_FPG,
            'free_capacity_gb': expected_free,
            'hpe3par_flash_cache': False,
            'hp3par_flash_cache': False,
@@ -2033,7 +2051,7 @@ class HPE3ParMediatorTestCase(test.TestCase):
        """"Allow user access to unsupported protocol."""
        self.init_mediator()

-       self.assertRaises(exception.InvalidInput,
+       self.assertRaises(exception.InvalidShareAccess,
                          self.mediator.update_access,
                          constants.EXPECTED_PROJECT_ID,
                          constants.EXPECTED_SHARE_ID,
@@ -2349,7 +2367,7 @@ class HPE3ParMediatorTestCase(test.TestCase):

    @ddt.data('', 'bogus')
    def test_other_protocol_exception(self, protocol):
-       self.assertRaises(exception.InvalidInput,
+       self.assertRaises(exception.InvalidShareAccess,
                          hpe3parmediator.HPE3ParMediator().other_protocol,
                          protocol)

@@ -2812,7 +2830,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
        mount_path = '%s:/%s/%s/%s/' % (constants.EXPECTED_IP_10203040, fpg,
                                        vfs, fstore)
        self.mediator._mount_super_share(protocol, mount_location, fpg, vfs,
-                                        fstore)
+                                        fstore,
+                                        constants.EXPECTED_IP_10203040)

        utils.execute.assert_called_with('mount', '-t', protocol, mount_path,
                                         mount_location, run_as_root=True)
@@ -2825,7 +2844,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.USERNAME, constants.PASSWORD,
            constants.EXPECTED_CIFS_DOMAIN)
        self.mediator._mount_super_share(protocol, mount_location, fpg, vfs,
-                                        fstore)
+                                        fstore,
+                                        constants.EXPECTED_IP_10203040)

        utils.execute.assert_called_with('mount', '-t', 'cifs', mount_path,
                                         mount_location, '-o', user,
@@ -2844,7 +2864,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
        vfs = 'bar-vfs'
        fstore = 'fstore'
        self.mediator._mount_super_share(protocol, mount_location, fpg, vfs,
-                                        fstore)
+                                        fstore,
+                                        constants.EXPECTED_IP_10203040)

        # Warning is logged (no exception thrown).
        self.assertTrue(mock_log.warning.called)
@@ -2903,7 +2924,8 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.SMB_LOWER,
            constants.EXPECTED_FPG,
            constants.EXPECTED_VFS,
-           constants.EXPECTED_FSTORE)
+           constants.EXPECTED_FSTORE,
+           constants.EXPECTED_SHARE_IP)

        # Warning is logged (no exception thrown).
        self.assertTrue(mock_log.warning.called)
@@ -2951,25 +2973,25 @@ class HPE3ParMediatorTestCase(test.TestCase):
            constants.EXPECTED_FSTORE,
            constants.EXPECTED_COMMENT)

-   def test_build_export_location_bad_protocol(self):
-       self.assertRaises(exception.InvalidInput,
-                         self.mediator.build_export_location,
+   def test_build_export_locations_bad_protocol(self):
+       self.assertRaises(exception.InvalidShareAccess,
+                         self.mediator.build_export_locations,
                          "BOGUS",
-                         constants.EXPECTED_IP_1234,
+                         [constants.EXPECTED_IP_1234],
                          constants.EXPECTED_SHARE_PATH)

-   def test_build_export_location_bad_ip(self):
+   def test_build_export_locations_bad_ip(self):
        self.assertRaises(exception.InvalidInput,
-                         self.mediator.build_export_location,
+                         self.mediator.build_export_locations,
                          constants.NFS,
                          None,
                          None)

-   def test_build_export_location_bad_path(self):
+   def test_build_export_locations_bad_path(self):
        self.assertRaises(exception.InvalidInput,
-                         self.mediator.build_export_location,
+                         self.mediator.build_export_locations,
                          constants.NFS,
-                         constants.EXPECTED_IP_1234,
+                         [constants.EXPECTED_IP_1234],
                          None)


@@ -1692,7 +1692,8 @@ class ShareManagerTestCase(test.TestCase):
            )
        ])
        self.share_manager._setup_server.assert_called_once_with(
-           utils.IsAMatcher(context.RequestContext), fake_server)
+           utils.IsAMatcher(context.RequestContext), fake_server,
+           metadata={'request_host': 'fake_host'})
        manager.LOG.error.assert_called_with(mock.ANY,
                                             fake_share.instance['id'])

@@ -1789,7 +1790,8 @@ class ShareManagerTestCase(test.TestCase):
        db.share_server_create.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext), mock.ANY)
        self.share_manager._setup_server.assert_called_once_with(
-           utils.IsAMatcher(context.RequestContext), fake_server)
+           utils.IsAMatcher(context.RequestContext), fake_server,
+           metadata={'request_host': 'fake_host'})

    def test_create_share_instance_update_replica_state(self):
        share_net = db_utils.create_share_network()
@@ -1824,7 +1826,8 @@ class ShareManagerTestCase(test.TestCase):
        db.share_server_create.assert_called_once_with(
            utils.IsAMatcher(context.RequestContext), mock.ANY)
        self.share_manager._setup_server.assert_called_once_with(
-           utils.IsAMatcher(context.RequestContext), fake_server)
+           utils.IsAMatcher(context.RequestContext), fake_server,
+           metadata={'request_host': 'fake_host'})

    @ddt.data(True, False)
    def test_create_delete_share_instance_error(self, exception_update_access):
@@ -0,0 +1,9 @@
+---
+features:
+  - HPE 3PAR driver now supports configuring multiple pools per backend.
+upgrade:
+  - HPE 3PAR driver no longer uses the hpe3par_share_ip_address option in
+    configuration. With pool support, configuration only requires the
+    hpe3par_fpg option; share IP address(es) may optionally be supplied
+    along with hpe3par_fpg.
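The release note above only names the options involved, so here is a sketch of what a multi-pool backend might look like in practice; the section, pool names, and addresses are invented for illustration and are not taken from this change.

    # Illustration only -- every identifier below is a placeholder.
    # In manila.conf, one hpe3par_fpg entry per pool, each naming an FPG and
    # optionally the share IP address(es) to use for it:
    #
    #   hpe3par_fpg = pool_fpg_1, 10.10.10.10
    #   hpe3par_fpg = pool_fpg_2
    #
    # Shares are then scheduled against 'host@backend#pool' style locations:
    example_share_host = 'ubuntu@hpe3par#pool_fpg_1'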