Add provisioned_capacity_gb estimation

Currently 'provisioned_capacity_gb' is expected from the drivers as
part of capability reporting. If a driver does not provide it, it
defaults to 'allocated_capacity_gb'. But if a driver provides neither
'allocated_capacity_gb' nor 'provisioned_capacity_gb', then
'provisioned_capacity_gb' defaults to 0, which skews the subsequent
calculation of 'provisioned_ratio'.

Fix this by summing the sizes of all shares on that host, as recorded
in the Manila database, and using the sum as 'provisioned_capacity_gb'
in that case.

Change-Id: I844d176eb6f0f5e7b0eb3cbd66c4b413b6757f51
Closes-Bug: #1606691
Author: nidhimittalhada
Date:   2016-08-04 12:33:23 +05:30
Commit: af7739aea2 (parent: d6637a43b8)
6 changed files with 261 additions and 53 deletions
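For orientation, here is a minimal standalone sketch of the fallback this change introduces, condensed from the host_manager.py hunk further below. The names 'capability' and 'share_sizes_on_host' are illustrative stand-ins for the driver-reported stats dict and the share sizes that the patch actually fetches via db.share_instances_get_all_by_host; they are not part of the patch itself.

    def estimate_provisioned_capacity_gb(capability, share_sizes_on_host):
        """Prefer the driver-reported value; otherwise sum known share sizes."""
        reported = capability.get('provisioned_capacity_gb')
        if reported:
            # The driver supplied the statistic, so use it as-is.
            return reported
        # Fall back to the shares Manila already tracks on this host.
        # Shares that are still being created have size None and count as 0.
        return sum(size or 0 for size in share_sizes_on_host)

    # Driver reports nothing: estimate from the database view of the host.
    assert estimate_provisioned_capacity_gb({}, [4, None, 10]) == 14
    # Driver reports the statistic: it takes precedence over the estimate.
    assert estimate_provisioned_capacity_gb(
        {'provisioned_capacity_gb': 256}, [4, None, 10]) == 256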


@@ -305,9 +305,10 @@ def share_instances_get_all_by_share_server(context, share_server_id):
                                                         share_server_id)


-def share_instances_get_all_by_host(context, host):
+def share_instances_get_all_by_host(context, host, with_share_data=False):
     """Returns all share instances with given host."""
-    return IMPL.share_instances_get_all_by_host(context, host)
+    return IMPL.share_instances_get_all_by_host(
+        context, host, with_share_data=with_share_data)


 def share_instances_get_all_by_share_network(context, share_network_id):


@@ -1210,10 +1210,28 @@ def share_instance_delete(context, instance_id, session=None):
         share_access_delete_all_by_share(context, share['id'])


+def _set_instances_share_data(context, instances, session):
+    if instances and not isinstance(instances, list):
+        instances = [instances]
+
+    instances_with_share_data = []
+    for instance in instances:
+        try:
+            parent_share = share_get(context, instance['share_id'],
+                                      session=session)
+        except exception.NotFound:
+            continue
+        instance.set_share_data(parent_share)
+        instances_with_share_data.append(instance)
+    return instances_with_share_data
+
+
 @require_admin_context
-def share_instances_get_all_by_host(context, host):
+def share_instances_get_all_by_host(context, host, with_share_data=False,
+                                    session=None):
     """Retrieves all share instances hosted on a host."""
-    result = (
+    session = session or get_session()
+    instances = (
         model_query(context, models.ShareInstance).filter(
             or_(
                 models.ShareInstance.host == host,
@@ -1221,7 +1239,10 @@ def share_instances_get_all_by_host(context, host):
             )
         ).all()
     )
-    return result
+
+    if with_share_data:
+        instances = _set_instances_share_data(context, instances, session)
+
+    return instances


 @require_context


@@ -68,6 +68,7 @@ LOG = log.getLogger(__name__)

 class ReadOnlyDict(IterableUserDict):
     """A read-only dict."""
+
     def __init__(self, source=None):
         self.data = {}
         self.update(source)
@@ -147,7 +148,8 @@ class HostState(object):
             service = {}
         self.service = ReadOnlyDict(service)

-    def update_from_share_capability(self, capability, service=None):
+    def update_from_share_capability(
+            self, capability, service=None, context=None):
         """Update information about a host from its share_node info.

         'capability' is the status info reported by share backend, a typical
@@ -203,9 +205,9 @@ class HostState(object):
             self.update_backend(capability)

             # Update pool level info
-            self.update_pools(capability, service)
+            self.update_pools(capability, service, context=context)

-    def update_pools(self, capability, service):
+    def update_pools(self, capability, service, context=None):
         """Update storage pools information from backend reported info."""
         if not capability:
             return
@@ -223,7 +225,8 @@ class HostState(object):
                     # Add new pool
                     cur_pool = PoolState(self.host, pool_cap, pool_name)
                     self.pools[pool_name] = cur_pool

-                cur_pool.update_from_share_capability(pool_cap, service)
+                cur_pool.update_from_share_capability(
+                    pool_cap, service, context=context)
                 active_pools.add(pool_name)
         elif pools is None:
@@ -250,7 +253,8 @@ class HostState(object):
                 self._append_backend_info(capability)
                 self.pools[pool_name] = single_pool

-            single_pool.update_from_share_capability(capability, service)
+            single_pool.update_from_share_capability(
+                capability, service, context=context)
             active_pools.add(pool_name)

         # Remove non-active pools from self.pools
@@ -341,6 +345,7 @@ class HostState(object):


 class PoolState(HostState):
+
     def __init__(self, host, capabilities, pool_name):
         new_host = share_utils.append_host(host, pool_name)
         super(PoolState, self).__init__(new_host, capabilities)
@@ -348,7 +353,20 @@ class PoolState(HostState):
         # No pools in pool
         self.pools = None

-    def update_from_share_capability(self, capability, service=None):
+    def _estimate_provisioned_capacity(self, host_name, context=None):
+        """Estimate provisioned capacity from share sizes on backend."""
+        provisioned_capacity = 0
+
+        instances = db.share_instances_get_all_by_host(
+            context, host_name, with_share_data=True)
+
+        for instance in instances:
+            # Size of share instance that's still being created, will be None.
+            provisioned_capacity += instance['size'] or 0
+        return provisioned_capacity
+
+    def update_from_share_capability(
+            self, capability, service=None, context=None):
         """Update information about a pool from its share_node info."""
         self.update_capabilities(capability, service)
         if capability:
@@ -366,10 +384,15 @@ class PoolState(HostState):
             # capacity of all the shares created on a backend, which is
             # greater than or equal to allocated_capacity_gb, which is the
             # apparent total capacity of all the shares created on a backend
-            # in Manila. Using allocated_capacity_gb as the default of
-            # provisioned_capacity_gb if it is not set.
+            # in Manila.
+            # NOTE(nidhimittalhada): If 'provisioned_capacity_gb' is not set,
+            # then calculating 'provisioned_capacity_gb' from share sizes
+            # on host, as per information available in manila database.
             self.provisioned_capacity_gb = capability.get(
-                'provisioned_capacity_gb', self.allocated_capacity_gb)
+                'provisioned_capacity_gb') or (
+                self._estimate_provisioned_capacity(self.host,
+                                                    context=context))
+
             self.max_over_subscription_ratio = capability.get(
                 'max_over_subscription_ratio',
                 CONF.max_over_subscription_ratio)
@@ -526,7 +549,7 @@ class HostManager(object):

             # Update capabilities and attributes in host_state
             host_state.update_from_share_capability(
-                capabilities, service=dict(service.items()))
+                capabilities, service=dict(service.items()), context=context)
             active_hosts.add(host)

         # remove non-active hosts from host_state_map


@@ -18,6 +18,8 @@
 """Testing of SQLAlchemy backend."""

 import ddt
+import mock
 from oslo_db import exception as db_exception
 from oslo_utils import uuidutils
 import six
@@ -193,6 +195,32 @@ class ShareDatabaseAPITestCase(test.TestCase):

         self.assertEqual('share-%s' % instance['id'], instance['name'])

+    @ddt.data(True, False)
+    def test_share_instance_get_all_by_host(self, with_share_data):
+        db_utils.create_share()
+        instances = db_api.share_instances_get_all_by_host(
+            self.ctxt, 'fake_host', with_share_data)
+
+        self.assertEqual(1, len(instances))
+        instance = instances[0]
+
+        self.assertEqual('share-%s' % instance['id'], instance['name'])
+
+        if with_share_data:
+            self.assertEqual('NFS', instance['share_proto'])
+            self.assertEqual(0, instance['size'])
+        else:
+            self.assertNotIn('share_proto', instance)
+
+    def test_share_instance_get_all_by_host_not_found_exception(self):
+        db_utils.create_share()
+        self.mock_object(db_api, 'share_get', mock.Mock(
+            side_effect=exception.NotFound))
+        instances = db_api.share_instances_get_all_by_host(
+            self.ctxt, 'fake_host', True)
+
+        self.assertEqual(0, len(instances))
+
     def test_share_instance_get_all_by_consistency_group(self):
         cg = db_utils.create_consistency_group()
         db_utils.create_share(consistency_group_id=cg['id'])


@@ -25,6 +25,7 @@ from oslo_config import cfg
 from oslo_utils import timeutils
 from six import moves

+from manila import context
 from manila import db
 from manila import exception
 from manila.scheduler.filters import base_host
@@ -143,7 +144,7 @@ class HostManagerTestCase(test.TestCase):
         self.assertDictMatch(service_states, expected)

     def test_get_all_host_states_share(self):
-        context = 'fake_context'
+        fake_context = context.RequestContext('user', 'project')
         topic = CONF.share_topic
         tmp_pools = copy.deepcopy(fakes.SHARE_SERVICES_WITH_POOLS)
         tmp_enable_pools = tmp_pools[:-2]
@@ -155,7 +156,7 @@ class HostManagerTestCase(test.TestCase):
         with mock.patch.dict(self.host_manager.service_states,
                              fakes.SHARE_SERVICE_STATES_WITH_POOLS):
             # Get service
-            self.host_manager.get_all_host_states_share(context)
+            self.host_manager.get_all_host_states_share(fake_context)

             # Disabled one service
             tmp_enable_pools.pop()
@@ -164,7 +165,7 @@ class HostManagerTestCase(test.TestCase):
                              mock.Mock(return_value=tmp_enable_pools))

             # Get service again
-            self.host_manager.get_all_host_states_share(context)
+            self.host_manager.get_all_host_states_share(fake_context)
             host_state_map = self.host_manager.host_state_map

             self.assertEqual(3, len(host_state_map))
@@ -173,10 +174,11 @@ class HostManagerTestCase(test.TestCase):
                 share_node = fakes.SHARE_SERVICES_WITH_POOLS[i]
                 host = share_node['host']
                 self.assertEqual(share_node, host_state_map[host].service)
-            db.service_get_all_by_topic.assert_called_once_with(context, topic)
+            db.service_get_all_by_topic.assert_called_once_with(
+                fake_context, topic)

     def test_get_pools_no_pools(self):
-        context = 'fake_context'
+        fake_context = context.RequestContext('user', 'project')
         self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
         self.mock_object(
             db, 'service_get_all_by_topic',
@@ -186,7 +188,7 @@ class HostManagerTestCase(test.TestCase):
         with mock.patch.dict(self.host_manager.service_states,
                              fakes.SERVICE_STATES_NO_POOLS):

-            res = self.host_manager.get_pools(context)
+            res = self.host_manager.get_pools(context=fake_context)

             expected = [
                 {
@@ -272,7 +274,7 @@ class HostManagerTestCase(test.TestCase):
             self.assertIn(pool, res)

     def test_get_pools(self):
-        context = 'fake_context'
+        fake_context = context.RequestContext('user', 'project')
         self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
         self.mock_object(
             db, 'service_get_all_by_topic',
@@ -282,7 +284,7 @@ class HostManagerTestCase(test.TestCase):
         with mock.patch.dict(self.host_manager.service_states,
                              fakes.SHARE_SERVICE_STATES_WITH_POOLS):

-            res = self.host_manager.get_pools(context)
+            res = self.host_manager.get_pools(fake_context)

             expected = [
                 {
@@ -424,7 +426,7 @@ class HostManagerTestCase(test.TestCase):
             self.assertIn(pool, res)

     def test_get_pools_host_down(self):
-        context = 'fake_context'
+        fake_context = context.RequestContext('user', 'project')
         mock_service_is_up = self.mock_object(utils, 'service_is_up')
         self.mock_object(
             db, 'service_get_all_by_topic',
@@ -438,7 +440,7 @@ class HostManagerTestCase(test.TestCase):
             mock_service_is_up.side_effect = [True, True, True]

             # Call once to update the host state map
-            self.host_manager.get_pools(context)
+            self.host_manager.get_pools(fake_context)
             self.assertEqual(len(fakes.SHARE_SERVICES_NO_POOLS),
                              len(self.host_manager.host_state_map))

@@ -446,7 +448,7 @@ class HostManagerTestCase(test.TestCase):
             # Then mock one host as down
             mock_service_is_up.side_effect = [True, True, False]

-            res = self.host_manager.get_pools(context)
+            res = self.host_manager.get_pools(fake_context)

             expected = [
                 {
@@ -510,7 +512,7 @@ class HostManagerTestCase(test.TestCase):
             self.assertIn(pool, res)

     def test_get_pools_with_filters(self):
-        context = 'fake_context'
+        fake_context = context.RequestContext('user', 'project')
         self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
         self.mock_object(
             db, 'service_get_all_by_topic',
@@ -521,7 +523,8 @@ class HostManagerTestCase(test.TestCase):
                              fakes.SHARE_SERVICE_STATES_WITH_POOLS):

             res = self.host_manager.get_pools(
-                context, filters={'host': 'host2', 'pool': 'pool*'})
+                context=fake_context,
+                filters={'host': 'host2', 'pool': 'pool*'})

             expected = [
                 {
@@ -582,6 +585,7 @@ class HostStateTestCase(test.TestCase):
     """Test case for HostState class."""

     def test_update_from_share_capability_nopool(self):
+        fake_context = context.RequestContext('user', 'project', is_admin=True)
         share_capability = {'total_capacity_gb': 0,
                             'free_capacity_gb': 100,
                             'reserved_percentage': 0,
@@ -589,7 +593,8 @@ class HostStateTestCase(test.TestCase):
         fake_host = host_manager.HostState('host1', share_capability)
         self.assertIsNone(fake_host.free_capacity_gb)

-        fake_host.update_from_share_capability(share_capability)
+        fake_host.update_from_share_capability(share_capability,
+                                               context=fake_context)
         # Backend level stats remain uninitialized
         self.assertEqual(0, fake_host.total_capacity_gb)
         self.assertIsNone(fake_host.free_capacity_gb)
@@ -599,18 +604,21 @@ class HostStateTestCase(test.TestCase):
         # Test update for existing host state
         share_capability.update(dict(total_capacity_gb=1000))
-        fake_host.update_from_share_capability(share_capability)
+        fake_host.update_from_share_capability(share_capability,
+                                               context=fake_context)
         self.assertEqual(1000, fake_host.pools['_pool0'].total_capacity_gb)

         # Test update for existing host state with different backend name
         share_capability.update(dict(share_backend_name='magic'))
-        fake_host.update_from_share_capability(share_capability)
+        fake_host.update_from_share_capability(share_capability,
+                                               context=fake_context)
         self.assertEqual(1000, fake_host.pools['magic'].total_capacity_gb)
         self.assertEqual(100, fake_host.pools['magic'].free_capacity_gb)

         # 'pool0' becomes nonactive pool, and is deleted
         self.assertRaises(KeyError, lambda: fake_host.pools['pool0'])

     def test_update_from_share_capability_with_pools(self):
+        fake_context = context.RequestContext('user', 'project', is_admin=True)
         fake_host = host_manager.HostState('host1#pool1')
         self.assertIsNone(fake_host.free_capacity_gb)
         capability = {
@@ -644,7 +652,8 @@ class HostStateTestCase(test.TestCase):
             'timestamp': None,
         }

-        fake_host.update_from_share_capability(capability)
+        fake_host.update_from_share_capability(capability,
+                                               context=fake_context)

         self.assertEqual('Backend1', fake_host.share_backend_name)
         self.assertEqual('NFS_CIFS', fake_host.storage_protocol)
@@ -680,7 +689,8 @@ class HostStateTestCase(test.TestCase):
         }

         # test update HostState Record
-        fake_host.update_from_share_capability(capability)
+        fake_host.update_from_share_capability(capability,
+                                               context=fake_context)

         self.assertEqual('1.0', fake_host.driver_version)
@@ -697,13 +707,15 @@ class HostStateTestCase(test.TestCase):
         share_capability = {
             'total_capacity_gb': 'unknown',
             'free_capacity_gb': 'unknown',
+            'allocated_capacity_gb': 1,
             'reserved_percentage': 0,
             'timestamp': None
         }

+        fake_context = context.RequestContext('user', 'project', is_admin=True)
         fake_host = host_manager.HostState('host1#_pool0')
         self.assertIsNone(fake_host.free_capacity_gb)
-
-        fake_host.update_from_share_capability(share_capability)
+        fake_host.update_from_share_capability(share_capability,
+                                               context=fake_context)

         # Backend level stats remain uninitialized
         self.assertEqual(fake_host.total_capacity_gb, 0)
         self.assertIsNone(fake_host.free_capacity_gb)
@@ -714,6 +726,7 @@ class HostStateTestCase(test.TestCase):
                          'unknown')

     def test_consume_from_share_capability(self):
+        fake_context = context.RequestContext('user', 'project', is_admin=True)
         share_size = 10
         free_capacity = 100
         fake_share = {'id': 'foo', 'size': share_size}
@@ -725,7 +738,8 @@ class HostStateTestCase(test.TestCase):
         }

         fake_host = host_manager.PoolState('host1', share_capability, '_pool0')
-        fake_host.update_from_share_capability(share_capability)
+        fake_host.update_from_share_capability(share_capability,
+                                               context=fake_context)
         fake_host.consume_from_share(fake_share)
         self.assertEqual(fake_host.free_capacity_gb,
                          free_capacity - share_size)
@@ -737,11 +751,13 @@ class HostStateTestCase(test.TestCase):
             'reserved_percentage': 0,
             'timestamp': None
         }
+        fake_context = context.RequestContext('user', 'project', is_admin=True)
         fake_host = host_manager.PoolState('host1', share_capability, '_pool0')
         share_size = 1000
         fake_share = {'id': 'foo', 'size': share_size}

-        fake_host.update_from_share_capability(share_capability)
+        fake_host.update_from_share_capability(share_capability,
+                                               context=fake_context)
         fake_host.consume_from_share(fake_share)
         self.assertEqual(fake_host.total_capacity_gb, 'unknown')
         self.assertEqual(fake_host.free_capacity_gb, 'unknown')
@@ -766,8 +782,10 @@ class HostStateTestCase(test.TestCase):
             'timestamp': None,
             'reserved_percentage': 0,
         }
+        fake_context = context.RequestContext('user', 'project', is_admin=True)
         fake_host = host_manager.HostState('host1')
-        fake_host.update_from_share_capability(capability)
+        fake_host.update_from_share_capability(capability,
+                                               context=fake_context)

         result = fake_host.__repr__()
         expected = "host: 'host1', free_capacity_gb: None, " \
@@ -776,25 +794,137 @@ class HostStateTestCase(test.TestCase):
         self.assertEqual(expected, result)


+@ddt.ddt
 class PoolStateTestCase(test.TestCase):
     """Test case for HostState class."""

-    def test_update_from_share_capability(self):
-        share_capability = {
-            'total_capacity_gb': 1024,
-            'free_capacity_gb': 512,
-            'reserved_percentage': 0,
-            'timestamp': None,
-            'cap1': 'val1',
-            'cap2': 'val2'
-        }
-
+    @ddt.data(
+        {
+            'share_capability':
+                {'total_capacity_gb': 1024, 'free_capacity_gb': 512,
+                 'reserved_percentage': 0, 'timestamp': None,
+                 'cap1': 'val1', 'cap2': 'val2'},
+            'instances':
+                [
+                    {
+                        'id': 1, 'host': 'host1',
+                        'status': 'available',
+                        'share_id': 11, 'size': 4,
+                        'updated_at': timeutils.utcnow()
+                    },
+                    {
+                        'id': 2, 'host': 'host1',
+                        'status': 'available',
+                        'share_id': 12, 'size': None,
+                        'updated_at': timeutils.utcnow()
+                    },
+                ]
+        },
+        {
+            'share_capability':
+                {'total_capacity_gb': 1024, 'free_capacity_gb': 512,
+                 'reserved_percentage': 0, 'timestamp': None,
+                 'cap1': 'val1', 'cap2': 'val2'},
+            'instances': []
+        },
+        {
+            'share_capability':
+                {'total_capacity_gb': 1024, 'free_capacity_gb': 512,
+                 'allocated_capacity_gb': 256, 'reserved_percentage': 0,
+                 'timestamp': None, 'cap1': 'val1', 'cap2': 'val2'},
+            'instances':
+                [
+                    {
+                        'id': 1, 'host': 'host1',
+                        'status': 'available',
+                        'share_id': 11, 'size': 4,
+                        'updated_at': timeutils.utcnow()
+                    },
+                ]
+        },
+        {
+            'share_capability':
+                {'total_capacity_gb': 1024, 'free_capacity_gb': 512,
+                 'allocated_capacity_gb': 256, 'reserved_percentage': 0,
+                 'timestamp': None, 'cap1': 'val1', 'cap2': 'val2'},
+            'instances': []
+        },
+        {
+            'share_capability':
+                {'total_capacity_gb': 1024, 'free_capacity_gb': 512,
+                 'provisioned_capacity_gb': 256, 'reserved_percentage': 0,
+                 'timestamp': None, 'cap1': 'val1', 'cap2': 'val2'},
+            'instances':
+                [
+                    {
+                        'id': 1, 'host': 'host1',
+                        'status': 'available',
+                        'share_id': 11, 'size': 1,
+                        'updated_at': timeutils.utcnow()
+                    },
+                ]
+        },
+        {
+            'share_capability':
+                {'total_capacity_gb': 1024, 'free_capacity_gb': 512,
+                 'allocated_capacity_gb': 256, 'provisioned_capacity_gb': 256,
+                 'reserved_percentage': 0, 'timestamp': None, 'cap1': 'val1',
+                 'cap2': 'val2'},
+            'instances':
+                [
+                    {
+                        'id': 1, 'host': 'host1',
+                        'status': 'available',
+                        'share_id': 11, 'size': 1,
+                        'updated_at': timeutils.utcnow()
+                    },
+                ]
+        },
+    )
+    @ddt.unpack
+    def test_update_from_share_capability(self, share_capability, instances):
+        fake_context = context.RequestContext('user', 'project', is_admin=True)
+        self.mock_object(
+            db, 'share_instances_get_all_by_host',
+            mock.Mock(return_value=instances))
+
         fake_pool = host_manager.PoolState('host1', None, 'pool0')
         self.assertIsNone(fake_pool.free_capacity_gb)
-        fake_pool.update_from_share_capability(share_capability)
-        self.assertEqual(fake_pool.host, 'host1#pool0')
-        self.assertEqual(fake_pool.pool_name, 'pool0')
-        self.assertEqual(fake_pool.total_capacity_gb, 1024)
-        self.assertEqual(fake_pool.free_capacity_gb, 512)
-        self.assertDictMatch(fake_pool.capabilities, share_capability)
+        fake_pool.update_from_share_capability(share_capability,
+                                               context=fake_context)
+
+        self.assertEqual('host1#pool0', fake_pool.host)
+        self.assertEqual('pool0', fake_pool.pool_name)
+        self.assertEqual(1024, fake_pool.total_capacity_gb)
+        self.assertEqual(512, fake_pool.free_capacity_gb)
+        self.assertDictMatch(share_capability, fake_pool.capabilities)
+
+        if 'provisioned_capacity_gb' not in share_capability:
+            db.share_instances_get_all_by_host.assert_called_once_with(
+                fake_context, fake_pool.host, with_share_data=True)
+
+            if len(instances) > 0:
+                self.assertEqual(4, fake_pool.provisioned_capacity_gb)
+            else:
+                self.assertEqual(0, fake_pool.provisioned_capacity_gb)
+
+            if 'allocated_capacity_gb' in share_capability:
+                self.assertEqual(share_capability['allocated_capacity_gb'],
+                                 fake_pool.allocated_capacity_gb)
+            elif 'allocated_capacity_gb' not in share_capability:
+                self.assertEqual(0, fake_pool.allocated_capacity_gb)
+        elif 'provisioned_capacity_gb' in share_capability and (
+                'allocated_capacity_gb' not in share_capability):
+            self.assertFalse(db.share_instances_get_all_by_host.called)
+
+            self.assertEqual(0, fake_pool.allocated_capacity_gb)
+            self.assertEqual(share_capability['provisioned_capacity_gb'],
+                             fake_pool.provisioned_capacity_gb)
+        elif 'provisioned_capacity_gb' in share_capability and (
+                'allocated_capacity_gb' in share_capability):
+            self.assertFalse(db.share_instances_get_all_by_host.called)
+
+            self.assertEqual(share_capability['allocated_capacity_gb'],
+                             fake_pool.allocated_capacity_gb)
+            self.assertEqual(share_capability['provisioned_capacity_gb'],
+                             fake_pool.provisioned_capacity_gb)


@@ -0,0 +1,5 @@
+---
+fixes:
+  - Improve max_over_subscription_ratio enforcement by providing a
+    reasonable estimate of backend provisioned-capacity when drivers
+    cannot supply it.