Add include_disabled parameter to service_get_all_by_binary
The database query service_get_all_by_binary should return a list of services with the specified binary, but it currently excludes disabled hosts. This change adds a named parameter called 'include_disabled' which provides the caller with the option of having disabled hosts returned too. This query is used by the scheduler to retrieve the list of available hosts. The initial list should include disabled hosts. The ComputeFilter removes disabled hosts, so inclusion of this filter is the proper way to exclude disabled hosts. Excluding disabled hosts from the initial list prevents the forcehost feature from being used to place an instance on a disabled host. The query is only used in objects.ServiceList.get_by_binary, which is only called in one other place, in cells/state.py by CellStateManager._get_compute_hosts. Adding the named parameter to the get_by_binary method of the ServiceList class means the behaviour of CellStateManager._get_compute_hosts will not be impacted. Change-Id: I05c2716da45119e6e6aa272b0701be9ae098842c Closes-Bug: #1553099
This commit is contained in:
parent
3d7e403cc7
commit
214b7550bc
@ -141,9 +141,13 @@ def service_get_all_by_topic(context, topic):
|
||||
return IMPL.service_get_all_by_topic(context, topic)
|
||||
|
||||
|
||||
def service_get_all_by_binary(context, binary, include_disabled=False):
    """Get services for a given binary.

    Disabled services are excluded from the result unless the
    'include_disabled' parameter is True.
    """
    return IMPL.service_get_all_by_binary(
        context, binary, include_disabled=include_disabled)
|
||||
|
||||
|
||||
def service_get_all_by_host(context, host):
|
||||
|
@ -545,11 +545,12 @@ def service_get_by_host_and_topic(context, host, topic):
|
||||
|
||||
|
||||
@pick_context_manager_reader
def service_get_all_by_binary(context, binary, include_disabled=False):
    """Return all non-deleted services running the given binary.

    By default only enabled services are returned; pass
    include_disabled=True to also get services whose 'disabled'
    flag is set.
    """
    query = model_query(context, models.Service, read_deleted="no")
    query = query.filter_by(binary=binary)
    if not include_disabled:
        # Default behaviour: hide services an operator has disabled.
        query = query.filter_by(disabled=False)
    return query.all()
|
||||
|
||||
|
||||
@pick_context_manager_reader
|
||||
|
@ -378,7 +378,8 @@ class ServiceList(base.ObjectListBase, base.NovaObject):
|
||||
# Version 1.15: Service version 1.17
|
||||
# Version 1.16: Service version 1.18
|
||||
# Version 1.17: Service version 1.19
|
||||
VERSION = '1.17'
|
||||
# Version 1.18: Added include_disabled parameter to get_by_binary()
|
||||
VERSION = '1.18'
|
||||
|
||||
fields = {
|
||||
'objects': fields.ListOfObjectsField('Service'),
|
||||
@ -390,9 +391,12 @@ class ServiceList(base.ObjectListBase, base.NovaObject):
|
||||
return base.obj_make_list(context, cls(context), objects.Service,
|
||||
db_services)
|
||||
|
||||
# NOTE(paul-carlton2): In v2.0 of the object the include_disabled flag
# will be removed so both enabled and disabled hosts are returned
@base.remotable_classmethod
def get_by_binary(cls, context, binary, include_disabled=False):
    """Build a ServiceList of services running the given binary.

    Disabled services are filtered out unless include_disabled is True.
    """
    db_services = db.service_get_all_by_binary(
        context, binary, include_disabled=include_disabled)
    return base.obj_make_list(context, cls(context), objects.Service,
                              db_services)
|
||||
|
||||
|
@ -550,7 +550,7 @@ class HostManager(object):
|
||||
|
||||
service_refs = {service.host: service
|
||||
for service in objects.ServiceList.get_by_binary(
|
||||
context, 'nova-compute')}
|
||||
context, 'nova-compute', include_disabled=True)}
|
||||
# Get resource usage across the available compute nodes:
|
||||
compute_nodes = objects.ComputeNodeList.get_all(context)
|
||||
seen_nodes = set()
|
||||
|
@ -3456,6 +3456,19 @@ class ServiceTestCase(test.TestCase, ModelsObjectComparatorMixin):
|
||||
real = db.service_get_all_by_binary(self.ctxt, 'b1')
|
||||
self._assertEqualListsOfObjects(expected, real)
|
||||
|
||||
def test_service_get_all_by_binary_include_disabled(self):
    """The disabled 'b1' service must be returned when asked for."""
    specs = [
        {'host': 'host1', 'binary': 'b1'},
        {'host': 'host2', 'binary': 'b1'},
        {'disabled': True, 'binary': 'b1'},
        {'host': 'host3', 'binary': 'b2'}
    ]
    created = [self._create_service(spec) for spec in specs]
    # All three 'b1' services are expected, including the disabled one;
    # the 'b2' service must not appear.
    real = db.service_get_all_by_binary(self.ctxt, 'b1',
                                        include_disabled=True)
    self._assertEqualListsOfObjects(created[:3], real)
|
||||
|
||||
def test_service_get_all_by_host(self):
|
||||
values = [
|
||||
{'host': 'host1', 'topic': 't11', 'binary': 'b11'},
|
||||
|
@ -1181,7 +1181,7 @@ object_data = {
|
||||
'SecurityGroupRule': '1.1-ae1da17b79970012e8536f88cb3c6b29',
|
||||
'SecurityGroupRuleList': '1.2-0005c47fcd0fb78dd6d7fd32a1409f5b',
|
||||
'Service': '1.19-8914320cbeb4ec29f252d72ce55d07e1',
|
||||
'ServiceList': '1.17-b767102cba7cbed290e396114c3f86b3',
|
||||
'ServiceList': '1.18-6c52cb616621c1af2415dcc11faf5c1a',
|
||||
'ServiceStatusNotification': '1.0-a73147b93b520ff0061865849d3dfa56',
|
||||
'ServiceStatusPayload': '1.0-a5e7b4fd6cc5581be45b31ff1f3a3f7f',
|
||||
'TaskLog': '1.0-78b0534366f29aa3eebb01860fbe18fe',
|
||||
|
@ -30,22 +30,29 @@ from nova.tests.unit.objects import test_compute_node
|
||||
from nova.tests.unit.objects import test_objects
|
||||
|
||||
NOW = timeutils.utcnow().replace(microsecond=0)
|
||||
fake_service = {
|
||||
'created_at': NOW,
|
||||
'updated_at': None,
|
||||
'deleted_at': None,
|
||||
'deleted': False,
|
||||
'id': 123,
|
||||
'host': 'fake-host',
|
||||
'binary': 'nova-fake',
|
||||
'topic': 'fake-service-topic',
|
||||
'report_count': 1,
|
||||
'forced_down': False,
|
||||
'disabled': False,
|
||||
'disabled_reason': None,
|
||||
'last_seen_up': None,
|
||||
'version': service.SERVICE_VERSION,
|
||||
}
|
||||
|
||||
|
||||
def _fake_service(**kwargs):
    """Return a fake service dict; keyword args override the defaults."""
    values = {
        'created_at': NOW,
        'updated_at': None,
        'deleted_at': None,
        'deleted': False,
        'id': 123,
        'host': 'fake-host',
        'binary': 'nova-fake',
        'topic': 'fake-service-topic',
        'report_count': 1,
        'forced_down': False,
        'disabled': False,
        'disabled_reason': None,
        'last_seen_up': None,
        'version': service.SERVICE_VERSION,
    }
    values.update(kwargs)
    return values
|
||||
|
||||
fake_service = _fake_service()
|
||||
|
||||
OPTIONAL = ['availability_zone', 'compute_node']
|
||||
|
||||
@ -188,7 +195,32 @@ class _TestServiceObject(object):
|
||||
services = service.ServiceList.get_by_binary(self.context,
|
||||
'fake-binary')
|
||||
self.assertEqual(1, len(services))
|
||||
mock_get.assert_called_once_with(self.context, 'fake-binary')
|
||||
mock_get.assert_called_once_with(self.context,
|
||||
'fake-binary',
|
||||
include_disabled=False)
|
||||
|
||||
@mock.patch('nova.db.service_get_all_by_binary')
def test_get_by_binary_disabled(self, mock_get):
    """A single disabled service is returned when include_disabled=True."""
    mock_get.return_value = [_fake_service(disabled=True)]
    result = service.ServiceList.get_by_binary(self.context,
                                               'fake-binary',
                                               include_disabled=True)
    self.assertEqual(1, len(result))
    # The flag must be forwarded to the db layer unchanged.
    mock_get.assert_called_once_with(self.context,
                                     'fake-binary',
                                     include_disabled=True)
|
||||
|
||||
@mock.patch('nova.db.service_get_all_by_binary')
def test_get_by_binary_both(self, mock_get):
    """Enabled and disabled services are both returned together."""
    mock_get.return_value = [_fake_service(),
                             _fake_service(disabled=True)]
    result = service.ServiceList.get_by_binary(self.context,
                                               'fake-binary',
                                               include_disabled=True)
    self.assertEqual(2, len(result))
    # The flag must be forwarded to the db layer unchanged.
    mock_get.assert_called_once_with(self.context,
                                     'fake-binary',
                                     include_disabled=True)
|
||||
|
||||
def test_get_by_host(self):
|
||||
self.mox.StubOutWithMock(db, 'service_get_all_by_host')
|
||||
|
@ -0,0 +1,6 @@
|
||||
---
|
||||
upgrade:
|
||||
- The FilterScheduler now includes disabled hosts.
|
||||
Make sure you include the ComputeFilter in the
|
||||
``scheduler_default_filters`` config option to avoid
|
||||
placing instances on disabled hosts.
|
Loading…
Reference in New Issue
Block a user