Allow scheduling to disabled manila-share host
- Only allow this for an admin user, and only with the 'only_host'
  scheduler hint. In all other cases, the scheduler will ignore
  disabled hosts. Please note that a 'disabled' host is up and running
  (i.e. listening for RPC calls), but has been disabled using the
  share service set API.

Closes-bug: #2072552
Change-Id: Iaaa4e274d47cf2eaa0247234f268fdb5ff24461b
parent db9bcfef2c
commit 203a5e325f
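In essence, the new rule distills to the following standalone sketch (plain Python with illustrative names, not the actual manila call chain):

# Distilled sketch of the rule this change introduces (illustrative
# helper, not real manila code): a disabled-but-running back end is
# considered only for an admin request pinned with the 'only_host' hint.
def consider_disabled_hosts(is_admin, filter_properties):
    hints = filter_properties.get('scheduler_hints') or {}
    return bool(is_admin and hints.get('only_host'))

# Non-admin requests and requests without the hint keep ignoring
# disabled hosts; admin plus 'only_host' makes them schedulable.
assert not consider_disabled_hosts(False,
                                   {'scheduler_hints': {'only_host': 'h1'}})
assert not consider_disabled_hosts(True, {})
assert consider_disabled_hosts(True,
                               {'scheduler_hints': {'only_host': 'h1'}})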
@@ -13,4 +13,5 @@ items that have field ``binary`` equal to ``manila-share``. Also, you can
 enable or disable share services using raw API requests. Disabling means that
 share services are excluded from the scheduler cycle and new shares will not
 be placed on the disabled back end. However, shares from this service stay
-available.
+available. Since the 2024.2 release, an admin can schedule a share on a
+disabled back end using the ``only_host`` scheduler hint.
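For orientation, this is the shape of the hint by the time it reaches the filter scheduler (a hedged illustration; the 'host7#_pool0' value is borrowed from the new unit test further down and follows manila's host#pool notation):

# Illustration only: filter_properties as received by the scheduler when
# the 'only_host' hint is set (value borrowed from the unit test below).
filter_properties = {
    'scheduler_hints': {'only_host': 'host7#_pool0'},
}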
@@ -103,9 +103,10 @@ def service_get_all(context, disabled=None):
     return IMPL.service_get_all(context, disabled)
 
 
-def service_get_all_by_topic(context, topic):
+def service_get_all_by_topic(context, topic, consider_disabled=False):
     """Get all services for a given topic."""
-    return IMPL.service_get_all_by_topic(context, topic)
+    return IMPL.service_get_all_by_topic(context, topic,
+                                         consider_disabled=consider_disabled)
 
 
 def service_get_all_share_sorted(context):
@@ -582,12 +582,14 @@ def service_get_all(context, disabled=None):
 
 @require_admin_context
 @context_manager.reader
-def service_get_all_by_topic(context, topic):
-    return (model_query(
-        context, models.Service, read_deleted="no").
-        filter_by(disabled=False).
-        filter_by(topic=topic).
-        all())
+def service_get_all_by_topic(context, topic, consider_disabled=False):
+    query = model_query(
+        context, models.Service, read_deleted="no")
+
+    if not consider_disabled:
+        query = query.filter_by(disabled=False)
+
+    return query.filter_by(topic=topic).all()
 
 
 @require_admin_context
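The conditional filter above can be modeled with plain dicts (a self-contained stand-in for the SQLAlchemy query, not the real DB API):

# Stand-in for the new DB query logic using plain dicts: skipping the
# disabled=False filter when consider_disabled is True returns services
# that were disabled via the share service set API as well.
def fake_service_get_all_by_topic(services, topic, consider_disabled=False):
    results = [s for s in services if s['topic'] == topic]
    if not consider_disabled:
        results = [s for s in results if not s['disabled']]
    return results

services = [
    {'host': 'host1', 'topic': 'share', 'disabled': False},
    {'host': 'host7', 'topic': 'share', 'disabled': True},
]
assert [s['host'] for s in
        fake_service_get_all_by_topic(services, 'share')] == ['host1']
assert [s['host'] for s in
        fake_service_get_all_by_topic(
            services, 'share', consider_disabled=True)] == ['host1', 'host7']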
@@ -27,6 +27,7 @@ from manila import exception
 from manila.i18n import _
 from manila.message import api as message_api
 from manila.message import message_field
+from manila import policy
 from manila.scheduler.drivers import base
 from manila.scheduler import scheduler_options
 from manila.share import share_types
@@ -240,7 +241,16 @@ class FilterScheduler(base.Scheduler):
 
         # Note: remember, we are using an iterator here. So only
         # traverse this list once.
-        hosts = self.host_manager.get_all_host_states_share(elevated)
+        consider_disabled = False
+        if policy.check_is_host_admin(context) and filter_properties.get(
+                'scheduler_hints', {}).get('only_host'):
+            # Admin user can schedule share on disabled host
+            consider_disabled = True
+
+        hosts = self.host_manager.get_all_host_states_share(
+            elevated,
+            consider_disabled=consider_disabled
+        )
         if not hosts:
             msg = _("There are no hosts to fulfill this "
                     "provisioning request. Are share "
@@ -631,18 +631,23 @@ class HostManager(object):
                   {'service_name': service_name, 'host': host,
                    'cap': capabilities})
 
-    def _update_host_state_map(self, context):
+    def _update_host_state_map(self, context, consider_disabled=False):
         # Get resource usage across the available share nodes:
         topic = CONF.share_topic
-        share_services = db.service_get_all_by_topic(context, topic)
+        share_services = db.service_get_all_by_topic(
+            context,
+            topic,
+            consider_disabled=consider_disabled,
+        )
 
         active_hosts = set()
         for service in share_services:
             host = service['host']
 
             # Warn about down services and remove them from host_state_map
-            if not utils.service_is_up(service) or service['disabled']:
+            is_down = not utils.service_is_up(service)
+            is_disabled = (not consider_disabled and service['disabled'])
+            if is_down or is_disabled:
                 LOG.warning("Share service is down. (host: %s).", host)
                 continue
 
@@ -668,7 +673,7 @@ class HostManager(object):
                         "scheduler cache.", {'host': host})
             self.host_state_map.pop(host, None)
 
-    def get_all_host_states_share(self, context):
+    def get_all_host_states_share(self, context, consider_disabled=False):
         """Returns a dict of all the hosts the HostManager knows about.
 
         Each of the consumable resources in HostState are
@@ -678,7 +683,10 @@ class HostManager(object):
             {'192.168.1.100': HostState(), ...}
         """
 
-        self._update_host_state_map(context)
+        self._update_host_state_map(
+            context,
+            consider_disabled=consider_disabled,
+        )
 
         # Build a pool_state map and return that map instead of host_state_map
         all_pools = {}
@@ -641,6 +641,35 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
                           sched.schedule_create_replica,
                           self.context, request_spec, {})
 
+    @mock.patch('manila.db.service_get_all_by_topic')
+    def test__schedule_share_with_disabled_host(
+            self, _mock_service_get_all_by_topic):
+        sched = fakes.FakeFilterScheduler()
+        sched.host_manager = fakes.FakeHostManager()
+        fake_context = context.RequestContext('user', 'project',
+                                              is_admin=True)
+        fake_type = {'name': 'NFS'}
+        request_spec = {
+            'share_properties': {'project_id': 1, 'size': 1},
+            'share_instance_properties': {},
+            'share_type': fake_type,
+            'share_id': 'fake-id1',
+        }
+        filter_properties = {
+            'scheduler_hints': {'only_host': 'host7#_pool0'}
+        }
+        fakes.mock_host_manager_db_calls(_mock_service_get_all_by_topic,
+                                         disabled=True)
+
+        weighed_host = sched._schedule_share(
+            fake_context, request_spec,
+            filter_properties=filter_properties)
+
+        self.assertIsNotNone(weighed_host)
+        self.assertIsNotNone(weighed_host.obj)
+        self.assertEqual('host7', weighed_host.obj.host.split('#')[0])
+        self.assertTrue(_mock_service_get_all_by_topic.called)
+
     def test_schedule_create_replica(self):
         sched = fakes.FakeFilterScheduler()
         request_spec = fakes.fake_replica_request_spec()
@@ -530,6 +530,19 @@ class FakeHostManager(host_manager.HostManager):
                       'storage_protocol': 'GLUSTERFS',
                       'vendor_name': 'Dummy',
                       },
+            'host7': {'total_capacity_gb': 'unknown',
+                      'free_capacity_gb': 'unknown',
+                      'allocated_capacity_gb': 1548,
+                      'thin_provisioning': False,
+                      'reserved_percentage': 5,
+                      'reserved_snapshot_percentage': 2,
+                      'reserved_share_extend_percentage': 5,
+                      'snapshot_support': True,
+                      'create_share_from_snapshot_support': True,
+                      'timestamp': None,
+                      'storage_protocol': 'NFS_CIFS',
+                      'vendor_name': 'Dummy',
+                      },
         }
 
 
@@ -565,6 +578,9 @@ def mock_host_manager_db_calls(mock_obj, disabled=None):
         dict(id=6, host='host6', topic='share', disabled=False,
              availability_zone=FAKE_AZ_4, availability_zone_id=FAKE_AZ_4['id'],
              updated_at=timeutils.utcnow()),
+        dict(id=7, host='host7', topic='share', disabled=True,
+             availability_zone=FAKE_AZ_4, availability_zone_id=FAKE_AZ_4['id'],
+             updated_at=timeutils.utcnow()),
     ]
     if disabled is None:
         mock_obj.return_value = services
@@ -169,7 +169,7 @@ class HostManagerTestCase(test.TestCase):
             host = share_node['host']
             self.assertEqual(share_node, host_state_map[host].service)
         db.service_get_all_by_topic.assert_called_once_with(
-            fake_context, topic)
+            fake_context, topic, consider_disabled=False)
 
     def test_get_pools_no_pools(self):
         fake_context = context.RequestContext('user', 'project')
@@ -54,7 +54,7 @@ class CapacityWeigherTestCase(test.TestCase):
             disabled=disabled)
         host_states = self.host_manager.get_all_host_states_share(ctxt)
         _mock_service_get_all_by_topic.assert_called_once_with(
-            ctxt, CONF.share_topic)
+            ctxt, CONF.share_topic, consider_disabled=False)
         return host_states
 
     # NOTE(xyang): If thin_provisioning = True and
@@ -262,7 +262,7 @@ class NetAppAIQWeigherTestCase(test.TestCase):
             disabled=disabled)
         host_states = self.host_manager.get_all_host_states_share(ctxt)
         _mock_service_get_all_by_topic.assert_called_once_with(
-            ctxt, CONF.share_topic)
+            ctxt, CONF.share_topic, consider_disabled=False)
         return host_states
 
     def test_weigh_objects_netapp_only(self):
@@ -80,7 +80,7 @@ class PoolWeigherTestCase(test.TestCase):
         ctxt = context.get_admin_context()
         host_states = self.host_manager.get_all_host_states_share(ctxt)
         db_api.IMPL.service_get_all_by_topic.assert_called_once_with(
-            ctxt, CONF.share_topic)
+            ctxt, CONF.share_topic, consider_disabled=False)
         return host_states
 
     def test_no_server_pool_mapping(self):
@@ -0,0 +1,7 @@
+---
+fixes:
+  - |
+    Manila now allows OpenStack administrators to schedule shares on hosts
+    that are currently running but marked as under maintenance (`disabled`)
+    through the `only_host` scheduler hint. For more details, please refer to
+    `launchpad bug 2072552 <https://bugs.launchpad.net/manila/+bug/2072552>`_.