Browse Source

Merge "Remove provisioned calculation on non thin provision backends" into stable/queens

changes/09/725109/1
Zuul 2 months ago
committed by Gerrit Code Review
parent
commit
dd47c3c7b9
3 changed files with 204 additions and 31 deletions
  1. +10
    -5
      manila/scheduler/host_manager.py
  2. +187
    -26
      manila/tests/scheduler/test_host_manager.py
  3. +7
    -0
      releasenotes/notes/bug-1869712-fix-increased-scheduled-time-for-non-thin-provisioned-backends-1da2cc33d365ba4f.yaml

+ 10
- 5
manila/scheduler/host_manager.py View File

@@ -422,6 +422,8 @@ class PoolState(HostState):
'allocated_capacity_gb', 0)
self.qos = capability.get('qos', False)
self.reserved_percentage = capability['reserved_percentage']
self.thin_provisioning = scheduler_utils.thin_provisioning(
capability.get('thin_provisioning', False))
# NOTE(xyang): provisioned_capacity_gb is the apparent total
# capacity of all the shares created on a backend, which is
# greater than or equal to allocated_capacity_gb, which is the
@@ -430,16 +432,19 @@ class PoolState(HostState):
# NOTE(nidhimittalhada): If 'provisioned_capacity_gb' is not set,
# then calculating 'provisioned_capacity_gb' from share sizes
# on host, as per information available in manila database.
# NOTE(jose-castro-leon): Only calculate provisioned_capacity_gb
# on thin provisioned pools
self.provisioned_capacity_gb = capability.get(
'provisioned_capacity_gb') or (
self._estimate_provisioned_capacity(self.host,
context=context))
'provisioned_capacity_gb')

if self.thin_provisioning and self.provisioned_capacity_gb is None:
self.provisioned_capacity_gb = (
self._estimate_provisioned_capacity(self.host,
context=context))

self.max_over_subscription_ratio = capability.get(
'max_over_subscription_ratio',
CONF.max_over_subscription_ratio)
self.thin_provisioning = capability.get(
'thin_provisioning', False)
self.dedupe = capability.get(
'dedupe', False)
self.compression = capability.get(


+ 187
- 26
manila/tests/scheduler/test_host_manager.py View File

@@ -30,6 +30,7 @@ from manila import db
from manila import exception
from manila.scheduler.filters import base_host
from manila.scheduler import host_manager
from manila.scheduler import utils as scheduler_utils
from manila import test
from manila.tests.scheduler import fakes
from manila import utils
@@ -865,6 +866,7 @@ class PoolStateTestCase(test.TestCase):
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'reserved_percentage': 0, 'timestamp': None,
'thin_provisioning': True,
'cap1': 'val1', 'cap2': 'val2'},
'instances':
[
@@ -882,6 +884,70 @@ class PoolStateTestCase(test.TestCase):
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'reserved_percentage': 0, 'timestamp': None,
'thin_provisioning': False, 'cap1': 'val1', 'cap2': 'val2'},
'instances':
[
{
'id': 1, 'host': 'host1',
'status': 'available',
'share_id': 11, 'size': 1,
'updated_at': timeutils.utcnow()
},
{
'id': 2, 'host': 'host1',
'status': 'available',
'share_id': 12, 'size': None,
'updated_at': timeutils.utcnow()
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'reserved_percentage': 0, 'timestamp': None,
'thin_provisioning': [False], 'cap1': 'val1', 'cap2': 'val2'},
'instances':
[
{
'id': 1, 'host': 'host1',
'status': 'available',
'share_id': 11, 'size': 1,
'updated_at': timeutils.utcnow()
},
{
'id': 2, 'host': 'host1',
'status': 'available',
'share_id': 12, 'size': None,
'updated_at': timeutils.utcnow()
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'reserved_percentage': 0, 'timestamp': None,
'thin_provisioning': [True, False], 'cap1': 'val1',
'cap2': 'val2'},
'instances':
[
{
'id': 1, 'host': 'host1',
'status': 'available',
'share_id': 11, 'size': 4,
'updated_at': timeutils.utcnow()
},
{
'id': 2, 'host': 'host1',
'status': 'available',
'share_id': 12, 'size': None,
'updated_at': timeutils.utcnow()
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
@@ -890,6 +956,30 @@ class PoolStateTestCase(test.TestCase):
'ipv6_support': False},
'instances': []
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'reserved_percentage': 0, 'timestamp': None,
'thin_provisioning': True, 'cap1': 'val1', 'cap2': 'val2',
'ipv4_support': True, 'ipv6_support': False},
'instances': []
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'reserved_percentage': 0, 'timestamp': None,
'thin_provisioning': [False], 'cap1': 'val1', 'cap2': 'val2',
'ipv4_support': True, 'ipv6_support': False},
'instances': []
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'reserved_percentage': 0, 'timestamp': None,
'thin_provisioning': [True, False], 'cap1': 'val1',
'cap2': 'val2', 'ipv4_support': True, 'ipv6_support': False},
'instances': []
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
@@ -932,12 +1022,76 @@ class PoolStateTestCase(test.TestCase):
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'allocated_capacity_gb': 256, 'provisioned_capacity_gb': 1,
'thin_provisioning': True, 'reserved_percentage': 0,
'timestamp': None, 'cap1': 'val1', 'cap2': 'val2'},
'instances':
[
{
'id': 1, 'host': 'host1',
'status': 'available',
'share_id': 11, 'size': 1,
'updated_at': timeutils.utcnow()
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'allocated_capacity_gb': 256, 'provisioned_capacity_gb': 1,
'thin_provisioning': [False], 'reserved_percentage': 0,
'timestamp': None, 'cap1': 'val1', 'cap2': 'val2'},
'instances':
[
{
'id': 1, 'host': 'host1',
'status': 'available',
'share_id': 11, 'size': 1,
'updated_at': timeutils.utcnow()
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'allocated_capacity_gb': 256, 'provisioned_capacity_gb': 1,
'thin_provisioning': [True, False], 'reserved_percentage': 0,
'timestamp': None, 'cap1': 'val1', 'cap2': 'val2'},
'instances':
[
{
'id': 1, 'host': 'host1',
'status': 'available',
'share_id': 11, 'size': 1,
'updated_at': timeutils.utcnow()
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'allocated_capacity_gb': 256, 'provisioned_capacity_gb': 256,
'reserved_percentage': 0, 'timestamp': None, 'cap1': 'val1',
'cap2': 'val2'},
'thin_provisioning': False, 'reserved_percentage': 0,
'timestamp': None, 'cap1': 'val1', 'cap2': 'val2'},
'instances':
[
{
'id': 1, 'host': 'host1',
'status': 'available',
'share_id': 11, 'size': 1,
'updated_at': timeutils.utcnow()
},
]
},
{
'share_capability':
{'total_capacity_gb': 1024, 'free_capacity_gb': 512,
'allocated_capacity_gb': 256, 'provisioned_capacity_gb': 256,
'thin_provisioning': [False], 'reserved_percentage': 0,
'timestamp': None, 'cap1': 'val1', 'cap2': 'val2'},
'instances':
[
{
@@ -967,35 +1121,42 @@ class PoolStateTestCase(test.TestCase):
self.assertEqual(512, fake_pool.free_capacity_gb)
self.assertDictMatch(share_capability, fake_pool.capabilities)

if 'provisioned_capacity_gb' not in share_capability:
db.share_instances_get_all_by_host.assert_called_once_with(
fake_context, fake_pool.host, with_share_data=True)

if len(instances) > 0:
self.assertEqual(4, fake_pool.provisioned_capacity_gb)
if 'thin_provisioning' in share_capability:
thin_provisioned = scheduler_utils.thin_provisioning(
share_capability['thin_provisioning'])
else:
thin_provisioned = False

if thin_provisioned:
self.assertEqual(thin_provisioned, fake_pool.thin_provisioning)
if 'provisioned_capacity_gb' not in share_capability or (
share_capability['provisioned_capacity_gb'] is None):
db.share_instances_get_all_by_host.assert_called_once_with(
fake_context, fake_pool.host, with_share_data=True)
if len(instances) > 0:
self.assertEqual(4, fake_pool.provisioned_capacity_gb)
else:
self.assertEqual(0, fake_pool.provisioned_capacity_gb)
else:
self.assertEqual(0, fake_pool.provisioned_capacity_gb)

if 'allocated_capacity_gb' in share_capability:
self.assertEqual(share_capability['allocated_capacity_gb'],
fake_pool.allocated_capacity_gb)
elif 'allocated_capacity_gb' not in share_capability:
self.assertEqual(0, fake_pool.allocated_capacity_gb)
elif 'provisioned_capacity_gb' in share_capability and (
'allocated_capacity_gb' not in share_capability):
self.assertFalse(db.share_instances_get_all_by_host.called)

self.assertEqual(0, fake_pool.allocated_capacity_gb)
self.assertEqual(share_capability['provisioned_capacity_gb'],
fake_pool.provisioned_capacity_gb)
elif 'provisioned_capacity_gb' in share_capability and (
'allocated_capacity_gb' in share_capability):
self.assertFalse(db.share_instances_get_all_by_host.called)
self.assertEqual(share_capability['provisioned_capacity_gb'],
fake_pool.provisioned_capacity_gb)
else:
self.assertFalse(fake_pool.thin_provisioning)
self.assertFalse(db.share_instances_get_all_by_host.called)
if 'provisioned_capacity_gb' not in share_capability or (
share_capability['provisioned_capacity_gb'] is None):
self.assertIsNone(fake_pool.provisioned_capacity_gb)
else:
self.assertEqual(share_capability['provisioned_capacity_gb'],
fake_pool.provisioned_capacity_gb)

if 'allocated_capacity_gb' in share_capability:
self.assertEqual(share_capability['allocated_capacity_gb'],
fake_pool.allocated_capacity_gb)
self.assertEqual(share_capability['provisioned_capacity_gb'],
fake_pool.provisioned_capacity_gb)
else:
self.assertEqual(0, fake_pool.allocated_capacity_gb)

if 'ipv4_support' in share_capability:
self.assertEqual(share_capability['ipv4_support'],
fake_pool.ipv4_support)


+ 7
- 0
releasenotes/notes/bug-1869712-fix-increased-scheduled-time-for-non-thin-provisioned-backends-1da2cc33d365ba4f.yaml View File

@@ -0,0 +1,7 @@
---
fixes:
- |
Fixes an increase in scheduling time for non-thin-provisioned backends.
On those backends, there is no need to calculate provisioned_capacity_gb,
as it is not used during scheduling. This calculation did not scale
properly in big environments, as it requires many database queries.

Loading…
Cancel
Save