Provide filter name in user messages
Filtering issues are most common when creating a share. When host
filtering fails, user messages now record which filter was the last
one executed.

DocImpact
Partially-implements: blueprint user-messages
Change-Id: I9ce096eebda3249687268e361b7141dea4032b57
commit 220cdfbd9f (parent ec8370e2d6)
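In short: when host filtering eliminates every candidate, the scheduler now
raises NoValidHost with the name of the last executed filter attached as
detail_data, and the message module translates that into a filter-specific
detail code. A minimal end-to-end sketch (values made up; the behavior
matches the hunks and unit tests below):

    from manila import exception
    from manila.message import message_field

    # The scheduler attaches the failing filter's name when it raises.
    ex = exception.NoValidHost(
        reason="Failed to find a weighted host, the last executed filter"
               " was CapacityFilter.",
        detail_data={'last_filter': 'CapacityFilter'})

    # Translation resolves it to the new filter-specific detail code '009'
    # instead of the generic NO_VALID_HOST code '002'.
    print(message_field.translate_detail_id(ex, detail=''))  # '009'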
@@ -10,6 +10,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+from manila import exception
 from manila.i18n import _
 
 
@@ -61,18 +62,50 @@ class Detail(object):
     NO_ACTIVE_REPLICA = (
         '006', _("Share has no replica with 'replica_state' set to 'active'."))
 
+    FILTER_MSG = _("No storage could be allocated for this share request, "
+                   "%s filter didn't succeed.")
+    FILTER_AVAILABILITY = ('007', FILTER_MSG % 'AvailabilityZone')
+    FILTER_CAPABILITIES = ('008', FILTER_MSG % 'Capabilities')
+    FILTER_CAPACITY = ('009', FILTER_MSG % 'Capacity')
+    FILTER_DRIVER = ('010', FILTER_MSG % 'Driver')
+    FILTER_IGNORE = ('011', FILTER_MSG % 'IgnoreAttemptedHosts')
+    FILTER_JSON = ('012', FILTER_MSG % 'Json')
+    FILTER_RETRY = ('013', FILTER_MSG % 'Retry')
+    FILTER_REPLICATION = ('014', FILTER_MSG % 'ShareReplication')
 
     ALL = (UNKNOWN_ERROR,
            NO_VALID_HOST,
            UNEXPECTED_NETWORK,
            NO_SHARE_SERVER,
            NO_ACTIVE_AVAILABLE_REPLICA,
-           NO_ACTIVE_REPLICA,)
+           NO_ACTIVE_REPLICA,
+           FILTER_AVAILABILITY,
+           FILTER_CAPABILITIES,
+           FILTER_CAPACITY,
+           FILTER_DRIVER,
+           FILTER_IGNORE,
+           FILTER_JSON,
+           FILTER_RETRY,
+           FILTER_REPLICATION)
 
     # Exception and detail mappings
     EXCEPTION_DETAIL_MAPPINGS = {
         NO_VALID_HOST: ['NoValidHost'],
     }
 
+    # Use a special code for each filter rather than categorizing them all
+    # as NO_VALID_HOST
+    FILTER_DETAIL_MAPPINGS = {
+        'AvailabilityZoneFilter': FILTER_AVAILABILITY,
+        'CapabilitiesFilter': FILTER_CAPABILITIES,
+        'CapacityFilter': FILTER_CAPACITY,
+        'DriverFilter': FILTER_DRIVER,
+        'IgnoreAttemptedHostsFilter': FILTER_IGNORE,
+        'JsonFilter': FILTER_JSON,
+        'RetryFilter': FILTER_RETRY,
+        'ShareReplicationFilter': FILTER_REPLICATION,
+    }
 
 
 def translate_action(action_id):
     action_message = next((action[1] for action in Action.ALL
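Each new detail pairs a stable code with a translated message template. A
quick sketch of how the template expands (the _() gettext wrapper is
omitted; strings are copied from the hunk above):

    # FILTER_MSG with 'Capacity' substituted yields the text users will
    # see for detail code '009'.
    FILTER_MSG = ("No storage could be allocated for this share request, "
                  "%s filter didn't succeed.")
    FILTER_CAPACITY = ('009', FILTER_MSG % 'Capacity')

    assert FILTER_CAPACITY[1] == (
        "No storage could be allocated for this share request, "
        "Capacity filter didn't succeed.")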
@@ -86,12 +119,23 @@ def translate_detail(detail_id):
     return detail_message or Detail.UNKNOWN_ERROR[1]
 
 
-def translate_detail_id(exception, detail):
-    if exception is not None and isinstance(exception, Exception):
-        for key, value in Detail.EXCEPTION_DETAIL_MAPPINGS.items():
-            if exception.__class__.__name__ in value:
-                return key[0]
-    if (detail in Detail.ALL and
-            detail is not Detail.EXCEPTION_DETAIL_MAPPINGS):
+def translate_detail_id(excep, detail):
+    if excep is not None:
+        detail = _translate_exception_to_detail(excep)
+    if detail in Detail.ALL:
         return detail[0]
     return Detail.UNKNOWN_ERROR[0]
 
 
+def _translate_exception_to_detail(ex):
+    if isinstance(ex, exception.NoValidHost):
+        # if NoValidHost was raised because a filter failed (a filter
+        # didn't return any hosts), use a filter-specific detail
+        details = getattr(ex, 'detail_data', {})
+        last_filter = details.get('last_filter')
+        return Detail.FILTER_DETAIL_MAPPINGS.get(
+            last_filter, Detail.NO_VALID_HOST)
+    else:
+        for key, value in Detail.EXCEPTION_DETAIL_MAPPINGS.items():
+            if ex.__class__.__name__ in value:
+                return key
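The new lookup falls back to the generic detail when the filter name is not
in FILTER_DETAIL_MAPPINGS; a small sketch mirroring the unit tests later in
this patch:

    from manila import exception
    from manila.message import message_field

    # A filter present in FILTER_DETAIL_MAPPINGS resolves to its own code...
    known = exception.NoValidHost(
        detail_data={'last_filter': 'CapacityFilter'}, reason='fake reason')
    assert message_field.translate_detail_id(known, detail='') == '009'

    # ...while an unrecognized filter name falls back to NO_VALID_HOST.
    unknown = exception.NoValidHost(
        detail_data={'last_filter': 'FakeFilter'}, reason='fake reason')
    assert message_field.translate_detail_id(unknown, detail='') == '002'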
@@ -83,9 +83,6 @@ class FilterScheduler(base.Scheduler):
                                             request_spec,
                                             filter_properties)
 
-        if not weighed_host:
-            raise exception.NoValidHost(reason="")
-
         host = weighed_host.obj.host
         share_id = request_spec['share_id']
         snapshot_id = request_spec['snapshot_id']
@@ -111,11 +108,6 @@ class FilterScheduler(base.Scheduler):
         weighed_host = self._schedule_share(
             context, request_spec, filter_properties)
 
-        if not weighed_host:
-            msg = _('Failed to find a weighted host for scheduling share '
-                    'replica %s.')
-            raise exception.NoValidHost(reason=msg % share_replica_id)
-
         host = weighed_host.obj.host
 
         updated_share_replica = base.share_replica_update_db(
@@ -218,10 +210,15 @@ class FilterScheduler(base.Scheduler):
         hosts = self.host_manager.get_all_host_states_share(elevated)
 
         # Filter local hosts based on requirements ...
-        hosts = self.host_manager.get_filtered_hosts(hosts,
-                                                     filter_properties)
+        hosts, last_filter = self.host_manager.get_filtered_hosts(
+            hosts, filter_properties)
 
         if not hosts:
-            return None
+            msg = _('Failed to find a weighted host, the last executed filter'
+                    ' was %s.')
+            raise exception.NoValidHost(
+                reason=msg % last_filter,
+                detail_data={'last_filter': last_filter})
 
         LOG.debug("Filtered share %(hosts)s", {"hosts": hosts})
         # weighted_host = WeightedHost() ... the best
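Note that _schedule_share now raises instead of returning None, so callers
that used to test for a falsy result must catch the exception. A
hypothetical caller sketch (the helper name and logging are illustrative,
not part of this patch):

    import logging

    from manila import exception

    LOG = logging.getLogger(__name__)


    def schedule_or_log(scheduler, context, request_spec):
        # Hypothetical helper: _schedule_share raises NoValidHost instead
        # of returning None; detail_data names the last executed filter.
        try:
            return scheduler._schedule_share(context, request_spec, {})
        except exception.NoValidHost as e:
            last_filter = getattr(e, 'detail_data', {}).get('last_filter')
            LOG.warning('Scheduling failed; last executed filter: %s',
                        last_filter)
            raise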
@@ -358,8 +355,8 @@ class FilterScheduler(base.Scheduler):
             'size': 0,
         }
         # Filter local hosts based on requirements ...
-        hosts = self.host_manager.get_filtered_hosts(all_hosts,
-                                                     filter_properties)
+        hosts, last_filter = self.host_manager.get_filtered_hosts(
+            all_hosts, filter_properties)
 
         if not hosts:
             return []
@@ -391,7 +388,7 @@ class FilterScheduler(base.Scheduler):
             'resource_type': share_group_type,
         }
 
-        hosts = self.host_manager.get_filtered_hosts(
+        hosts, last_filter = self.host_manager.get_filtered_hosts(
             all_hosts,
             filter_properties,
             CONF.scheduler_default_share_group_filters)
@@ -471,13 +468,16 @@ class FilterScheduler(base.Scheduler):
             context, filter_properties, request_spec)
 
         hosts = self.host_manager.get_all_host_states_share(elevated)
-        hosts = self.host_manager.get_filtered_hosts(hosts, filter_properties)
+        hosts, last_filter = self.host_manager.get_filtered_hosts(
+            hosts, filter_properties)
         hosts = self.host_manager.get_weighed_hosts(hosts, filter_properties)
 
         for tgt_host in hosts:
             if tgt_host.obj.host == host:
                 return tgt_host.obj
 
-        msg = (_('Cannot place share %(id)s on %(host)s')
-               % {'id': request_spec['share_id'], 'host': host})
+        msg = (_('Cannot place share %(id)s on %(host)s, the last executed'
+                 ' filter was %(last_filter)s.')
+               % {'id': request_spec['share_id'], 'host': host,
+                  'last_filter': last_filter})
         raise exception.NoValidHost(reason=msg)
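For reference, the reworded 'Cannot place share' message renders like this
(identifiers are made up):

    msg = ("Cannot place share %(id)s on %(host)s, the last executed"
           " filter was %(last_filter)s."
           % {'id': 'fake-share-id', 'host': 'host1@backend1#pool1',
              'last_filter': 'CapacityFilter'})
    assert msg == ("Cannot place share fake-share-id on "
                   "host1@backend1#pool1, the last executed filter "
                   "was CapacityFilter.")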
@@ -87,7 +87,7 @@ class BaseFilterHandler(base_handler.BaseHandler):
                 if objs is None:
                     LOG.debug("Filter %(cls_name)s says to stop filtering",
                               {'cls_name': cls_name})
-                    return
+                    return (None, cls_name)
                 list_objs = list(objs)
                 msg = ("Filter %(cls_name)s returned %(obj_len)d host(s)"
                        % {'cls_name': cls_name, 'obj_len': len(list_objs)})
@@ -95,4 +95,4 @@ class BaseFilterHandler(base_handler.BaseHandler):
                     LOG.info(msg)
                     break
                 LOG.debug(msg)
-        return list_objs
+        return (list_objs, cls_name)
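With get_filtered_objects now returning an (objects, last_filter_name)
tuple, every caller unpacks two values. A standalone toy sketch of the new
contract (the filter classes here are illustrative stand-ins, not Manila's):

    class PassThrough(object):
        def filter_all(self, objs, filter_properties):
            return objs


    class RejectAll(object):
        def filter_all(self, objs, filter_properties):
            # Returning None signals "stop filtering".
            return None


    def get_filtered_objects(filter_classes, objs, filter_properties):
        # Mirrors the new shape: always return (objects, last_filter_name).
        cls_name = None
        for filter_cls in filter_classes:
            cls_name = filter_cls.__name__
            objs = filter_cls().filter_all(objs, filter_properties)
            if objs is None:
                return (None, cls_name)
            objs = list(objs)
        return (objs, cls_name)


    hosts, last_filter = get_filtered_objects(
        [PassThrough, RejectAll], ['host1', 'host2'], {})
    assert hosts is None and last_filter == 'RejectAll'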
@@ -52,9 +52,16 @@ class MessageFieldTest(test.TestCase):
         self.assertEqual(content, result)
 
     @ddt.data({'exception': exception.NoValidHost(reason='fake reason'),
-               'detail': '',
-               'expected': '002'},
-              {'exception': '', 'detail': message_field.Detail.NO_VALID_HOST,
+               'detail': '', 'expected': '002'},
+              {'exception': exception.NoValidHost(
+                  detail_data={'last_filter': 'CapacityFilter'},
+                  reason='fake reason'),
+               'detail': '', 'expected': '009'},
+              {'exception': exception.NoValidHost(
+                  detail_data={'last_filter': 'FakeFilter'},
+                  reason='fake reason'),
+               'detail': '', 'expected': '002'},
+              {'exception': None, 'detail': message_field.Detail.NO_VALID_HOST,
                'expected': '002'})
     @ddt.unpack
     def test_translate_detail_id(self, exception, detail, expected):
@@ -150,9 +150,8 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
             'share_instance_properties': {'project_id': 1, 'size': 1},
         }
 
-        weighed_host = sched._schedule_share(fake_context, request_spec, {})
-        self.assertIsNone(weighed_host)
+        self.assertRaises(exception.NoValidHost, sched._schedule_share,
+                          fake_context, request_spec, {})
         self.assertTrue(_mock_service_get_all_by_topic.called)
 
     @ddt.data(
@@ -203,9 +202,8 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
             'share_properties': {'project_id': 1, 'size': 1},
             'share_instance_properties': {'project_id': 1, 'size': 1},
         }
-        weighed_host = sched._schedule_share(fake_context, request_spec, {})
-        self.assertIsNone(weighed_host)
+        self.assertRaises(exception.NoValidHost, sched._schedule_share,
+                          fake_context, request_spec, {})
         self.assertTrue(_mock_service_get_all_by_topic.called)
 
     def _setup_dedupe_fakes(self, extra_specs):
@@ -245,9 +243,8 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
                                  {'capabilities:dedupe': capability})
         fakes.mock_host_manager_db_calls(_mock_service_get_all_by_topic)
 
-        weighed_host = sched._schedule_share(fake_context, request_spec, {})
-        self.assertIsNone(weighed_host)
+        self.assertRaises(exception.NoValidHost, sched._schedule_share,
+                          fake_context, request_spec, {})
         self.assertTrue(_mock_service_get_all_by_topic.called)
 
     def test_schedule_share_type_is_none(self):
@@ -275,9 +272,8 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
             'share_instance_properties': {'availability_zone_id': "fake_az"},
         }
 
-        weighed_host = sched._schedule_share(fake_context, request_spec, {})
-        self.assertIsNone(weighed_host)
+        self.assertRaises(exception.NoValidHost, sched._schedule_share,
+                          fake_context, request_spec, {})
         self.assertTrue(_mock_service_get_all_by_topic.called)
 
     def test_max_attempts(self):
@@ -300,7 +296,8 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
             'share_instance_properties': {},
         }
         filter_properties = {}
-        sched._schedule_share(self.context, request_spec,
+        self.assertRaises(exception.NoValidHost, sched._schedule_share,
+                          self.context, request_spec,
                           filter_properties=filter_properties)
         # Should not have retry info in the populated filter properties.
         self.assertNotIn("retry", filter_properties)
@@ -315,7 +312,8 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
             'share_instance_properties': {},
         }
         filter_properties = {}
-        sched._schedule_share(self.context, request_spec,
+        self.assertRaises(exception.NoValidHost, sched._schedule_share,
+                          self.context, request_spec,
                           filter_properties=filter_properties)
         num_attempts = filter_properties['retry']['num_attempts']
         self.assertEqual(1, num_attempts)
@@ -331,7 +329,8 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
         }
         retry = dict(num_attempts=1)
         filter_properties = dict(retry=retry)
-        sched._schedule_share(self.context, request_spec,
+        self.assertRaises(exception.NoValidHost, sched._schedule_share,
+                          self.context, request_spec,
                           filter_properties=filter_properties)
         num_attempts = filter_properties['retry']['num_attempts']
         self.assertEqual(2, num_attempts)
@@ -500,10 +499,15 @@ class FilterSchedulerTestCase(test_base.SchedulerTestCase):
 
     def test_schedule_create_replica_no_host(self):
         sched = fakes.FakeFilterScheduler()
-        request_spec = fakes.fake_replica_request_spec()
-
-        self.mock_object(self.driver_cls, '_schedule_share',
-                         mock.Mock(return_value=None))
+        request_spec = {
+            'share_type': {'name': 'fake_type'},
+            'share_properties': {'project_id': 1, 'size': 1},
+            'share_instance_properties': {'project_id': 1, 'size': 1},
+        }
+        self.mock_object(sched.host_manager, 'get_all_host_states_share',
+                         mock.Mock(return_value=[]))
+        self.mock_object(sched.host_manager, 'get_filtered_hosts',
+                         mock.Mock(return_value=(None, 'filter')))
 
         self.assertRaises(exception.NoValidHost,
                           sched.schedule_create_replica,
@@ -129,15 +129,17 @@ class TestBaseFilterHandler(test.TestCase):
     def test_get_filtered_objects_return_none(self, fake3_filter_all,
                                               fake4_filter_all):
         filter_classes = [FakeFilter1, FakeFilter2, FakeFilter3, FakeFilter4]
-        result = self._get_filtered_objects(filter_classes)
+        result, last_filter = self._get_filtered_objects(filter_classes)
         self.assertIsNone(result)
         self.assertFalse(fake4_filter_all.called)
+        self.assertEqual('FakeFilter3', last_filter)
 
     def test_get_filtered_objects(self):
         filter_objs_expected = [1, 2, 3, 4]
         filter_classes = [FakeFilter1, FakeFilter2, FakeFilter3, FakeFilter4]
-        result = self._get_filtered_objects(filter_classes)
+        result, last_filter = self._get_filtered_objects(filter_classes)
         self.assertEqual(filter_objs_expected, result)
+        self.assertEqual('FakeFilter4', last_filter)
 
     def test_get_filtered_objects_with_filter_run_once(self):
         filter_objs_expected = [1, 2, 3, 4]
@@ -146,14 +148,16 @@ class TestBaseFilterHandler(test.TestCase):
         with mock.patch.object(FakeFilter5, 'filter_all',
                                return_value=filter_objs_expected
                                ) as fake5_filter_all:
-            result = self._get_filtered_objects(filter_classes)
+            result, last_filter = self._get_filtered_objects(filter_classes)
             self.assertEqual(filter_objs_expected, result)
             self.assertEqual(1, fake5_filter_all.call_count)
 
-            result = self._get_filtered_objects(filter_classes, index=1)
+            result, last_filter = self._get_filtered_objects(
+                filter_classes, index=1)
             self.assertEqual(filter_objs_expected, result)
             self.assertEqual(1, fake5_filter_all.call_count)
 
-            result = self._get_filtered_objects(filter_classes, index=2)
+            result, last_filter = self._get_filtered_objects(
+                filter_classes, index=2)
             self.assertEqual(filter_objs_expected, result)
             self.assertEqual(1, fake5_filter_all.call_count)
@@ -98,8 +98,8 @@ class HostManagerTestCase(test.TestCase):
             return True
 
         self.mock_object(FakeFilterClass1, '_filter_one', fake_filter_one)
-        result = self.host_manager.get_filtered_hosts(self.fake_hosts,
-                                                      fake_properties)
+        result, last_filter = self.host_manager.get_filtered_hosts(
+            self.fake_hosts, fake_properties)
         self._verify_result(info, result)
         self.host_manager._choose_host_filters.assert_called_once_with(
             mock.ANY)
@@ -87,7 +87,12 @@ class UserMessageTest(base.BaseSharesAdminTest):
         self.assertEqual(set(MESSAGE_KEYS), set(message.keys()))
         self.assertTrue(uuidutils.is_uuid_like(message['id']))
         self.assertEqual('001', message['action_id'])
-        self.assertEqual('002', message['detail_id'])
+        # Don't check the specific detail_id, which may vary depending on
+        # the order of the filters; we can still check the user_message.
+        self.assertIn(
+            'No storage could be allocated for this share request',
+            message['user_message'])
         self.assertEqual('SHARE', message['resource_type'])
         self.assertTrue(uuidutils.is_uuid_like(message['resource_id']))
         self.assertEqual('ERROR', message['message_level'])