Create a fixture around fake_notifier
The fake_notifier uses module globals and also needs careful stub and reset calls to work properly. This patch wraps the fake_notifier into a proper Fixture that automates the complexity. This is a fairly large patch, but it does not change any logic; it just redirects calls from the fake_notifier to the new NotificationFixture Change-Id: I456f685f480b8de71014cf232a8f08c731605ad8
This commit is contained in:
parent
696fbab9e1
commit
f1f599d098
|
@ -20,6 +20,7 @@ from .glance import GlanceFixture # noqa: F401
|
|||
from .libvirt import LibvirtFixture # noqa: F401
|
||||
from .libvirt_imagebackend import LibvirtImageBackendFixture # noqa: F401
|
||||
from .neutron import NeutronFixture # noqa: F401
|
||||
from .notifications import NotificationFixture # noqa: F401
|
||||
from .nova import * # noqa: F401, F403
|
||||
from .os_brick import OSBrickFixture # noqa: F401
|
||||
from .policy import OverridePolicyFixture # noqa: F401
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import fixtures
|
||||
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class NotificationFixture(fixtures.Fixture):
    """Wrap the module-global fake_notifier in a proper test fixture.

    The legacy ``fake_notifier`` module keeps its state in module globals
    and requires matching stub/reset calls.  This fixture automates that
    pairing: ``setUp`` installs the stub notifier for the given test case
    and registers the reset as cleanup, so tests only interact with the
    fixture instance.
    """

    def __init__(self, test):
        # The test case whose notifier will be stubbed out; stored so
        # setUp() can defer stubbing until the fixture is activated.
        self.test = test

    def setUp(self):
        """Install the fake notifier and arrange for automatic reset."""
        super().setUp()
        # Register the reset first so it runs even if stubbing fails midway.
        self.addCleanup(fake_notifier.reset)
        fake_notifier.stub_notifier(self.test)

    def reset(self):
        """Drop all notifications recorded so far (mid-test reset)."""
        fake_notifier.reset()

    def wait_for_versioned_notifications(
        self, event_type, n_events=1, timeout=10.0,
    ):
        """Block until ``n_events`` versioned notifications of
        ``event_type`` have been emitted, or ``timeout`` seconds elapse.

        :returns: whatever the underlying subscription's ``wait_n`` returns
            (the collected notifications).
        """
        subscription = fake_notifier.VERSIONED_SUBS[event_type]
        return subscription.wait_n(n_events, event_type, timeout)

    @property
    def versioned_notifications(self):
        """All versioned notifications captured by the fake notifier."""
        return fake_notifier.VERSIONED_NOTIFICATIONS
|
|
@ -15,7 +15,7 @@ from oslo_utils.fixture import uuidsentinel as uuids
|
|||
from nova import context
|
||||
from nova import objects
|
||||
from nova import test
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova.tests import fixtures
|
||||
|
||||
|
||||
class ImageCacheTest(test.TestCase):
|
||||
|
@ -26,8 +26,8 @@ class ImageCacheTest(test.TestCase):
|
|||
|
||||
self.flags(compute_driver='fake.FakeDriverWithCaching')
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(fixtures.NotificationFixture(self))
|
||||
|
||||
self.context = context.get_admin_context()
|
||||
|
||||
self.conductor = self.start_service('conductor')
|
||||
|
@ -70,10 +70,10 @@ class ImageCacheTest(test.TestCase):
|
|||
mgr = getattr(self, host)
|
||||
self.assertEqual(set(), mgr.driver.cached_images)
|
||||
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'aggregate.cache_images.start')
|
||||
|
||||
progress = fake_notifier.wait_for_versioned_notifications(
|
||||
progress = self.notifier.wait_for_versioned_notifications(
|
||||
'aggregate.cache_images.progress', n_events=4)
|
||||
self.assertEqual(4, len(progress), progress)
|
||||
for notification in progress:
|
||||
|
@ -89,7 +89,7 @@ class ImageCacheTest(test.TestCase):
|
|||
self.assertEqual(4, payload['total'])
|
||||
self.assertIn('conductor', notification['publisher_id'])
|
||||
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'aggregate.cache_images.end')
|
||||
|
||||
logtext = self.stdlog.logger.output
|
||||
|
|
|
@ -16,7 +16,6 @@ import time
|
|||
from nova import context as nova_context
|
||||
from nova import objects
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class ComputeManagerInitHostTestCase(
|
||||
|
@ -164,7 +163,7 @@ class TestComputeRestartInstanceStuckInBuild(
|
|||
|
||||
# the instance.create.start is the closest thing to the
|
||||
# instance_claim call we can wait for in the test
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.create.start')
|
||||
|
||||
with mock.patch('nova.compute.manager.LOG.debug') as mock_log:
|
||||
|
|
|
@ -19,7 +19,6 @@ from nova import exception
|
|||
from nova import test
|
||||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class FakeCinderError(object):
|
||||
|
@ -48,8 +47,6 @@ class LiveMigrationCinderFailure(integrated_helpers._IntegratedTestBase):
|
|||
|
||||
def setUp(self):
|
||||
super(LiveMigrationCinderFailure, self).setUp()
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
# Start a second compute node (the first one was started for us by
|
||||
# _IntegratedTestBase. set_nodes() is needed to avoid duplicate
|
||||
# nodenames. See comments in test_bug_1702454.py.
|
||||
|
|
|
@ -32,7 +32,6 @@ from nova import test
|
|||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional import fixtures as func_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova.virt import driver as virt_driver
|
||||
|
||||
|
||||
|
@ -703,8 +702,6 @@ class TestProviderConfig(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
self.api = self.useFixture(nova_fixtures.OSAPIFixture(
|
||||
api_version='v2.1')).admin_api
|
||||
self.api.microversion = 'latest'
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.start_service('conductor')
|
||||
# start nova-compute that will not have the additional trait.
|
||||
self._start_compute("fake-host-1")
|
||||
|
|
|
@ -40,7 +40,6 @@ from nova import test
|
|||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional.api import client as api_client
|
||||
from nova.tests.functional import fixtures as func_fixtures
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova import utils
|
||||
|
||||
|
||||
|
@ -408,7 +407,7 @@ class InstanceHelperMixin:
|
|||
self.api.post_server_action(
|
||||
server['id'], {'reboot': {'type': 'HARD' if hard else 'SOFT'}},
|
||||
)
|
||||
fake_notifier.wait_for_versioned_notifications('instance.reboot.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.reboot.end')
|
||||
return self._wait_for_state_change(server, expected_state)
|
||||
|
||||
def _attach_interface(self, server, port_uuid):
|
||||
|
@ -419,14 +418,14 @@ class InstanceHelperMixin:
|
|||
}
|
||||
}
|
||||
attachment = self.api.attach_interface(server['id'], body)
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.interface_attach.end')
|
||||
return attachment
|
||||
|
||||
def _detach_interface(self, server, port_uuid):
|
||||
"""detach a neutron port form a server."""
|
||||
self.api.detach_interface(server['id'], port_uuid)
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.interface_detach.end')
|
||||
|
||||
def _rebuild_server(self, server, image_uuid, expected_state='ACTIVE'):
|
||||
|
@ -434,7 +433,7 @@ class InstanceHelperMixin:
|
|||
self.api.post_server_action(
|
||||
server['id'], {'rebuild': {'imageRef': image_uuid}},
|
||||
)
|
||||
fake_notifier.wait_for_versioned_notifications('instance.rebuild.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.rebuild.end')
|
||||
return self._wait_for_state_change(server, expected_state)
|
||||
|
||||
def _migrate_server(self, server, host=None):
|
||||
|
@ -446,7 +445,7 @@ class InstanceHelperMixin:
|
|||
def _resize_server(self, server, flavor_id):
|
||||
self.api.post_server_action(
|
||||
server['id'], {'resize': {'flavorRef': flavor_id}})
|
||||
fake_notifier.wait_for_versioned_notifications('instance.resize.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.resize.end')
|
||||
return self._wait_for_state_change(server, 'VERIFY_RESIZE')
|
||||
|
||||
def _confirm_resize(self, server, *, cross_cell=False):
|
||||
|
@ -469,7 +468,7 @@ class InstanceHelperMixin:
|
|||
# dest host revert_resize method but the allocations are cleaned up
|
||||
# in the source host finish_revert_resize method so we need to wait
|
||||
# for the finish_revert_resize method to complete.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.resize_revert.end')
|
||||
return server
|
||||
|
||||
|
@ -488,13 +487,13 @@ class InstanceHelperMixin:
|
|||
def _suspend_server(self, server, expected_state='SUSPENDED'):
|
||||
"""Suspend a server."""
|
||||
self.api.post_server_action(server['id'], {'suspend': {}})
|
||||
fake_notifier.wait_for_versioned_notifications('instance.suspend.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.suspend.end')
|
||||
return self._wait_for_state_change(server, expected_state)
|
||||
|
||||
def _resume_server(self, server, expected_state='ACTIVE'):
|
||||
"""Resume a server."""
|
||||
self.api.post_server_action(server['id'], {'resume': {}})
|
||||
fake_notifier.wait_for_versioned_notifications('instance.resume.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.resume.end')
|
||||
return self._wait_for_state_change(server, expected_state)
|
||||
|
||||
def _shelve_server(self, server, expected_state='SHELVED_OFFLOADED'):
|
||||
|
@ -914,7 +913,7 @@ class PlacementInstanceHelperMixin(InstanceHelperMixin, PlacementHelperMixin):
|
|||
# instance.delete.end notification as that is emitted after the
|
||||
# resources are freed.
|
||||
|
||||
fake_notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
|
||||
for rp_uuid in [
|
||||
self._get_provider_uuid_by_host(hostname)
|
||||
|
@ -1115,8 +1114,8 @@ class _IntegratedTestBase(test.TestCase, PlacementInstanceHelperMixin):
|
|||
self.glance = self.useFixture(nova_fixtures.GlanceFixture(self))
|
||||
self.policy = self.useFixture(nova_fixtures.RealPolicyFixture())
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
self._setup_services()
|
||||
|
||||
|
@ -1186,8 +1185,8 @@ class ProviderUsageBaseTestCase(test.TestCase, PlacementInstanceHelperMixin):
|
|||
self.placement = self.useFixture(func_fixtures.PlacementFixture()).api
|
||||
self.useFixture(nova_fixtures.AllServicesCurrent())
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
self.api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
|
||||
api_version='v2.1'))
|
||||
|
|
|
@ -31,7 +31,6 @@ from nova.tests.fixtures import libvirt as fakelibvirt
|
|||
from nova.tests.functional import fixtures as func_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_network
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova.virt.libvirt import config as libvirt_config
|
||||
|
||||
CONF = conf.CONF
|
||||
|
@ -431,8 +430,8 @@ class _LibvirtEvacuateTest(integrated_helpers.InstanceHelperMixin):
|
|||
# force_down and evacuate without onSharedStorage
|
||||
self.api.microversion = '2.14'
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
self.useFixture(nova_fixtures.LibvirtFixture())
|
||||
|
||||
|
@ -523,7 +522,7 @@ class _LibvirtEvacuateTest(integrated_helpers.InstanceHelperMixin):
|
|||
expected_task_state=None, expected_migration_status='failed')
|
||||
|
||||
# Wait for the rebuild to start, then complete
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.rebuild.start')
|
||||
|
||||
# Meta-test
|
||||
|
|
|
@ -27,7 +27,6 @@ from nova.tests import fixtures as nova_fixtures
|
|||
from nova.tests.fixtures import libvirt as fakelibvirt
|
||||
from nova.tests.functional.api import client
|
||||
from nova.tests.functional.libvirt import base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -1252,9 +1251,6 @@ class NUMAServersRebuildTests(NUMAServersTestBase):
|
|||
self.image_ref_0 = images[0]['id']
|
||||
self.image_ref_1 = images[1]['id']
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
|
||||
def _create_active_server(self, server_args=None):
|
||||
basic_server = {
|
||||
'flavorRef': 1,
|
||||
|
|
|
@ -35,7 +35,6 @@ from nova.tests import fixtures as nova_fixtures
|
|||
from nova.tests.fixtures import libvirt as fakelibvirt
|
||||
from nova.tests.functional.api import client
|
||||
from nova.tests.functional.libvirt import base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -711,14 +710,14 @@ class SRIOVAttachDetachTest(_PCIServersTestBase):
|
|||
|
||||
def _detach_port(self, instance_uuid, port_id):
|
||||
self.api.detach_interface(instance_uuid, port_id)
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.interface_detach.end')
|
||||
|
||||
def _attach_port(self, instance_uuid, port_id):
|
||||
self.api.attach_interface(
|
||||
instance_uuid,
|
||||
{'interfaceAttachment': {'port_id': port_id}})
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.interface_attach.end')
|
||||
|
||||
def _test_detach_attach(self, first_port_id, second_port_id):
|
||||
|
|
|
@ -27,7 +27,6 @@ from nova.tests.functional import integrated_helpers
|
|||
from nova.tests import json_ref
|
||||
from nova.tests.unit.api.openstack.compute import test_services
|
||||
from nova.tests.unit import fake_crypto
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
@ -75,8 +74,8 @@ class NotificationSampleTestBase(test.TestCase,
|
|||
self.api.microversion = max_version
|
||||
self.admin_api.microversion = max_version
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
self.useFixture(utils_fixture.TimeFixture(test_services.fake_utcnow()))
|
||||
self.useFixture(nova_fixtures.GlanceFixture(self))
|
||||
|
@ -92,7 +91,7 @@ class NotificationSampleTestBase(test.TestCase,
|
|||
self.start_service('scheduler')
|
||||
self.compute = self.start_service('compute')
|
||||
# Reset the service create notifications
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
def _get_notification_sample(self, sample):
|
||||
sample_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
|
@ -136,9 +135,9 @@ class NotificationSampleTestBase(test.TestCase,
|
|||
notification emitted during the test.
|
||||
"""
|
||||
if not actual:
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS),
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS)
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications),
|
||||
self.notifier.versioned_notifications)
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
else:
|
||||
notification = actual
|
||||
sample_file = self._get_notification_sample(sample_file_name)
|
||||
|
@ -163,12 +162,12 @@ class NotificationSampleTestBase(test.TestCase,
|
|||
scheduler_expected_notifications = [
|
||||
'scheduler-select_destinations-start',
|
||||
'scheduler-select_destinations-end']
|
||||
self.assertLessEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertLessEqual(2, len(self.notifier.versioned_notifications))
|
||||
for notification in scheduler_expected_notifications:
|
||||
self._verify_notification(
|
||||
notification,
|
||||
replacements=replacements,
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS.pop(0))
|
||||
actual=self.notifier.versioned_notifications.pop(0))
|
||||
|
||||
def _boot_a_server(self, expected_status='ACTIVE', extra_params=None,
|
||||
scheduler_hints=None, additional_extra_specs=None):
|
||||
|
@ -191,7 +190,7 @@ class NotificationSampleTestBase(test.TestCase,
|
|||
self.admin_api.post_extra_spec(flavor_id, extra_specs)
|
||||
|
||||
# Ignore the create flavor notification
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
keypair_req = {
|
||||
"keypair": {
|
||||
|
@ -204,11 +203,11 @@ class NotificationSampleTestBase(test.TestCase,
|
|||
'keypair-import-start',
|
||||
'keypair-import-end'
|
||||
]
|
||||
self.assertLessEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertLessEqual(2, len(self.notifier.versioned_notifications))
|
||||
for notification in keypair_expected_notifications:
|
||||
self._verify_notification(
|
||||
notification,
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS.pop(0))
|
||||
actual=self.notifier.versioned_notifications.pop(0))
|
||||
|
||||
server = self._build_server(
|
||||
name='some-server',
|
||||
|
@ -252,19 +251,19 @@ class NotificationSampleTestBase(test.TestCase,
|
|||
|
||||
def _get_notifications(self, event_type):
|
||||
return [notification for notification
|
||||
in fake_notifier.VERSIONED_NOTIFICATIONS
|
||||
in self.notifier.versioned_notifications
|
||||
if notification['event_type'] == event_type]
|
||||
|
||||
def _wait_for_notification(self, event_type, timeout=10.0):
|
||||
# NOTE(mdbooth): wait_for_versioned_notifications raises an exception
|
||||
# if it times out since change I017d1a31. Consider removing this
|
||||
# method.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
event_type, timeout=timeout)
|
||||
|
||||
def _wait_for_notifications(self, event_type, expected_count,
|
||||
timeout=10.0):
|
||||
notifications = fake_notifier.wait_for_versioned_notifications(
|
||||
notifications = self.notifier.wait_for_versioned_notifications(
|
||||
event_type, n_events=expected_count, timeout=timeout)
|
||||
msg = ''.join('\n%s' % notif for notif in notifications)
|
||||
|
||||
|
|
|
@ -11,7 +11,6 @@
|
|||
# under the License.
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestAggregateNotificationSample(
|
||||
|
@ -28,34 +27,34 @@ class TestAggregateNotificationSample(
|
|||
"availability_zone": "nova"}}
|
||||
aggregate = self.admin_api.post_aggregate(aggregate_req)
|
||||
|
||||
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(2, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'aggregate-create-start',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
self._verify_notification(
|
||||
'aggregate-create-end',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
||||
self.admin_api.delete_aggregate(aggregate['id'])
|
||||
|
||||
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(4, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'aggregate-delete-start',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
|
||||
actual=self.notifier.versioned_notifications[2])
|
||||
self._verify_notification(
|
||||
'aggregate-delete-end',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
|
||||
actual=self.notifier.versioned_notifications[3])
|
||||
|
||||
def test_aggregate_add_remove_host(self):
|
||||
aggregate_req = {
|
||||
|
@ -64,7 +63,7 @@ class TestAggregateNotificationSample(
|
|||
"availability_zone": "nova"}}
|
||||
aggregate = self.admin_api.post_aggregate(aggregate_req)
|
||||
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
add_host_req = {
|
||||
"add_host": {
|
||||
|
@ -73,19 +72,19 @@ class TestAggregateNotificationSample(
|
|||
}
|
||||
self.admin_api.post_aggregate_action(aggregate['id'], add_host_req)
|
||||
|
||||
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(2, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'aggregate-add_host-start',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
self._verify_notification(
|
||||
'aggregate-add_host-end',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
||||
remove_host_req = {
|
||||
"remove_host": {
|
||||
|
@ -94,19 +93,19 @@ class TestAggregateNotificationSample(
|
|||
}
|
||||
self.admin_api.post_aggregate_action(aggregate['id'], remove_host_req)
|
||||
|
||||
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(4, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'aggregate-remove_host-start',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
|
||||
actual=self.notifier.versioned_notifications[2])
|
||||
self._verify_notification(
|
||||
'aggregate-remove_host-end',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
|
||||
actual=self.notifier.versioned_notifications[3])
|
||||
|
||||
self.admin_api.delete_aggregate(aggregate['id'])
|
||||
|
||||
|
@ -124,22 +123,22 @@ class TestAggregateNotificationSample(
|
|||
}
|
||||
}
|
||||
}
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
self.admin_api.post_aggregate_action(aggregate['id'], set_metadata_req)
|
||||
|
||||
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(2, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'aggregate-update_metadata-start',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
self._verify_notification(
|
||||
'aggregate-update_metadata-end',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
||||
def test_aggregate_updateprops(self):
|
||||
aggregate_req = {
|
||||
|
@ -157,19 +156,19 @@ class TestAggregateNotificationSample(
|
|||
# 1. aggregate-create-end
|
||||
# 2. aggregate-update_prop-start
|
||||
# 3. aggregate-update_prop-end
|
||||
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(4, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'aggregate-update_prop-start',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
|
||||
actual=self.notifier.versioned_notifications[2])
|
||||
self._verify_notification(
|
||||
'aggregate-update_prop-end',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
|
||||
actual=self.notifier.versioned_notifications[3])
|
||||
|
||||
def test_aggregate_cache_images(self):
|
||||
aggregate_req = {
|
||||
|
@ -184,7 +183,7 @@ class TestAggregateNotificationSample(
|
|||
}
|
||||
self.admin_api.post_aggregate_action(aggregate['id'], add_host_req)
|
||||
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
cache_images_req = {
|
||||
'cache': [
|
||||
|
@ -195,26 +194,26 @@ class TestAggregateNotificationSample(
|
|||
cache_images_req)
|
||||
# Since the operation is asynchronous we have to wait for the end
|
||||
# notification.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'aggregate.cache_images.end')
|
||||
|
||||
self.assertEqual(3, len(fake_notifier.VERSIONED_NOTIFICATIONS),
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS)
|
||||
self.assertEqual(3, len(self.notifier.versioned_notifications),
|
||||
self.notifier.versioned_notifications)
|
||||
self._verify_notification(
|
||||
'aggregate-cache_images-start',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
self._verify_notification(
|
||||
'aggregate-cache_images-progress',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
self._verify_notification(
|
||||
'aggregate-cache_images-end',
|
||||
replacements={
|
||||
'uuid': aggregate['uuid'],
|
||||
'id': aggregate['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
|
||||
actual=self.notifier.versioned_notifications[2])
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
from nova.tests import fixtures
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestComputeTaskNotificationSample(
|
||||
|
@ -38,8 +37,8 @@ class TestComputeTaskNotificationSample(
|
|||
self._wait_for_notification('compute_task.build_instances.error')
|
||||
# 0. scheduler.select_destinations.start
|
||||
# 1. compute_task.rebuild_server.error
|
||||
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS),
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS)
|
||||
self.assertEqual(2, len(self.notifier.versioned_notifications),
|
||||
self.notifier.versioned_notifications)
|
||||
self._verify_notification(
|
||||
'compute_task-build_instances-error',
|
||||
replacements={
|
||||
|
@ -52,7 +51,7 @@ class TestComputeTaskNotificationSample(
|
|||
'reason.module_name': self.ANY,
|
||||
'reason.traceback': self.ANY
|
||||
},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
||||
def test_rebuild_fault(self):
|
||||
server = self._boot_a_server(
|
||||
|
@ -65,7 +64,7 @@ class TestComputeTaskNotificationSample(
|
|||
service_id = self.api.get_service_id('nova-compute')
|
||||
self.admin_api.put_service_force_down(service_id, True)
|
||||
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
# NOTE(takashin): The rebuild action and the evacuate action shares
|
||||
# same code path. So the 'evacuate' action is used for this test.
|
||||
|
@ -76,8 +75,8 @@ class TestComputeTaskNotificationSample(
|
|||
# 0. instance.evacuate
|
||||
# 1. scheduler.select_destinations.start
|
||||
# 2. compute_task.rebuild_server.error
|
||||
self.assertEqual(3, len(fake_notifier.VERSIONED_NOTIFICATIONS),
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS)
|
||||
self.assertEqual(3, len(self.notifier.versioned_notifications),
|
||||
self.notifier.versioned_notifications)
|
||||
self._verify_notification(
|
||||
'compute_task-rebuild_server-error',
|
||||
replacements={
|
||||
|
@ -90,7 +89,7 @@ class TestComputeTaskNotificationSample(
|
|||
'reason.module_name': self.ANY,
|
||||
'reason.traceback': self.ANY
|
||||
},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
|
||||
actual=self.notifier.versioned_notifications[2])
|
||||
|
||||
def test_migrate_fault(self):
|
||||
server = self._boot_a_server(
|
||||
|
@ -103,14 +102,14 @@ class TestComputeTaskNotificationSample(
|
|||
service_id = self.api.get_service_id('nova-compute')
|
||||
self.admin_api.put_service(service_id, {'status': 'disabled'})
|
||||
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
# Note that the operation will return a 202 response but fail with
|
||||
# NoValidHost asynchronously.
|
||||
self.admin_api.post_server_action(server['id'], {'migrate': None})
|
||||
self._wait_for_notification('compute_task.migrate_server.error')
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS),
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS)
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications),
|
||||
self.notifier.versioned_notifications)
|
||||
self._verify_notification(
|
||||
'compute_task-migrate_server-error',
|
||||
replacements={
|
||||
|
@ -124,4 +123,4 @@ class TestComputeTaskNotificationSample(
|
|||
'reason.module_name': self.ANY,
|
||||
'reason.traceback': self.ANY
|
||||
},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
|
|
|
@ -12,7 +12,6 @@
|
|||
from nova.tests.functional.api import client as api_client
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestExceptionNotificationSample(
|
||||
|
@ -33,12 +32,12 @@ class TestExceptionNotificationSample(
|
|||
self.assertRaises(api_client.OpenStackApiException,
|
||||
self.admin_api.api_post, 'os-aggregates', post)
|
||||
|
||||
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
traceback = fake_notifier.VERSIONED_NOTIFICATIONS[3][
|
||||
self.assertEqual(4, len(self.notifier.versioned_notifications))
|
||||
traceback = self.notifier.versioned_notifications[3][
|
||||
'payload']['nova_object.data']['traceback']
|
||||
self.assertIn('AggregateNameExists', traceback)
|
||||
self._verify_notification(
|
||||
'compute-exception',
|
||||
replacements={
|
||||
'traceback': self.ANY},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
|
||||
actual=self.notifier.versioned_notifications[3])
|
||||
|
|
|
@ -12,7 +12,6 @@
|
|||
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestFlavorNotificationSample(
|
||||
|
@ -48,7 +47,7 @@ class TestFlavorNotificationSample(
|
|||
self.admin_api.api_delete(
|
||||
'flavors/a22d5517-147c-4147-a0d1-e698df5cd4e3')
|
||||
self._verify_notification(
|
||||
'flavor-delete', actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
'flavor-delete', actual=self.notifier.versioned_notifications[1])
|
||||
|
||||
def test_flavor_update(self):
|
||||
body = {
|
||||
|
@ -84,7 +83,7 @@ class TestFlavorNotificationSample(
|
|||
body)
|
||||
|
||||
self._verify_notification(
|
||||
'flavor-update', actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
|
||||
'flavor-update', actual=self.notifier.versioned_notifications[2])
|
||||
|
||||
|
||||
class TestFlavorNotificationSamplev2_55(
|
||||
|
@ -110,11 +109,11 @@ class TestFlavorNotificationSamplev2_55(
|
|||
flavor = self.admin_api.api_post('flavors', body).body['flavor']
|
||||
# Check the notification; should be the same as the sample where there
|
||||
# is no description set.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'flavor-create',
|
||||
replacements={'is_public': False},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
|
||||
# Update and set the flavor description.
|
||||
self.admin_api.api_put(
|
||||
|
@ -122,10 +121,10 @@ class TestFlavorNotificationSamplev2_55(
|
|||
{'flavor': {'description': 'test description'}}).body['flavor']
|
||||
|
||||
# Assert the notifications, one for create and one for update.
|
||||
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(2, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'flavor-update',
|
||||
replacements={'description': 'test description',
|
||||
'extra_specs': {},
|
||||
'projects': []},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -11,7 +11,6 @@
|
|||
# under the License.
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestKeypairNotificationSample(
|
||||
|
@ -26,35 +25,35 @@ class TestKeypairNotificationSample(
|
|||
}}
|
||||
keypair = self.api.post_keypair(keypair_req)
|
||||
|
||||
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(2, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'keypair-create-start',
|
||||
replacements={},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
self._verify_notification(
|
||||
'keypair-create-end',
|
||||
replacements={
|
||||
"fingerprint": keypair['fingerprint'],
|
||||
"public_key": keypair['public_key']
|
||||
},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
||||
self.api.delete_keypair(keypair['name'])
|
||||
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(4, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'keypair-delete-start',
|
||||
replacements={
|
||||
"fingerprint": keypair['fingerprint'],
|
||||
"public_key": keypair['public_key']
|
||||
},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
|
||||
actual=self.notifier.versioned_notifications[2])
|
||||
self._verify_notification(
|
||||
'keypair-delete-end',
|
||||
replacements={
|
||||
"fingerprint": keypair['fingerprint'],
|
||||
"public_key": keypair['public_key']
|
||||
},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
|
||||
actual=self.notifier.versioned_notifications[3])
|
||||
|
||||
def test_keypair_import(self):
|
||||
pub_key = ('ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDx8nkQv/zgGg'
|
||||
|
@ -71,10 +70,10 @@ class TestKeypairNotificationSample(
|
|||
|
||||
self.api.post_keypair(keypair_req)
|
||||
|
||||
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(2, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'keypair-import-start',
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
self._verify_notification(
|
||||
'keypair-import-end',
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
|
|
@ -21,7 +21,6 @@ from nova.tests import fixtures as nova_fixtures
|
|||
from nova.tests.fixtures import libvirt as fakelibvirt
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova.virt.libvirt import host
|
||||
|
||||
|
||||
|
@ -45,7 +44,7 @@ class TestLibvirtErrorNotificationSample(
|
|||
self.assertRaises(exception.HypervisorUnavailable,
|
||||
self.restart_compute_service, self.compute)
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'libvirt-connect-error',
|
||||
replacements={
|
||||
|
@ -54,4 +53,4 @@ class TestLibvirtErrorNotificationSample(
|
|||
'reason.module_name': self.ANY,
|
||||
'reason.traceback': self.ANY
|
||||
},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
|
|
|
@ -16,7 +16,6 @@ import nova.conf
|
|||
from nova import context
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
CONF = nova.conf.CONF
|
||||
|
@ -35,8 +34,8 @@ class TestMetricsNotificationSample(
|
|||
self.compute.manager.update_available_resource(
|
||||
context.get_admin_context())
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'metrics-update',
|
||||
replacements={'host_ip': CONF.my_ip},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
|
|
|
@ -12,7 +12,6 @@
|
|||
from nova.tests import fixtures
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestServerGroupNotificationSample(
|
||||
|
@ -31,20 +30,20 @@ class TestServerGroupNotificationSample(
|
|||
}
|
||||
group = self.api.post_server_groups(group_req)
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'server_group-create',
|
||||
replacements={'uuid': group['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
self.api.delete_server_group(group['id'])
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'server_group-delete',
|
||||
replacements={'uuid': group['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
|
||||
def test_server_group_add_member(self):
|
||||
group_req = {
|
||||
|
@ -53,7 +52,7 @@ class TestServerGroupNotificationSample(
|
|||
"rules": {"max_server_per_host": 3}
|
||||
}
|
||||
group = self.api.post_server_groups(group_req)
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
server = self._boot_a_server(
|
||||
extra_params={'networks': [{'port': self.neutron.port_1['id']}]},
|
||||
|
@ -66,9 +65,9 @@ class TestServerGroupNotificationSample(
|
|||
# 4: instance.create.end
|
||||
# 5: instance.update
|
||||
# (Due to adding server tags in the '_boot_a_server' method.)
|
||||
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(6, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'server_group-add_member',
|
||||
replacements={'uuid': group['id'],
|
||||
'members': [server['id']]},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
|
|
|
@ -20,7 +20,6 @@ from nova.tests import fixtures
|
|||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit.api.openstack.compute import test_services
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestServiceNotificationBase(
|
||||
|
@ -167,4 +166,4 @@ class TestServiceNotificationSample(TestServiceNotificationBase):
|
|||
self._verify_notification(
|
||||
'service-delete',
|
||||
replacements={'uuid': compute2_service_id},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
|
|
@ -16,7 +16,6 @@ from nova import context
|
|||
from nova.tests import fixtures
|
||||
from nova.tests.functional.notification_sample_tests \
|
||||
import notification_sample_base
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestVolumeUsageNotificationSample(
|
||||
|
@ -34,7 +33,7 @@ class TestVolumeUsageNotificationSample(
|
|||
server = self._boot_a_server(
|
||||
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
|
||||
self._attach_volume_to_server(server, self.cinder.SWAP_OLD_VOL)
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
return server
|
||||
|
||||
|
@ -47,19 +46,19 @@ class TestVolumeUsageNotificationSample(
|
|||
# 0. volume_detach-start
|
||||
# 1. volume.usage
|
||||
# 2. volume_detach-end
|
||||
self.assertEqual(3, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(3, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'volume-usage',
|
||||
replacements={'instance_uuid': server['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
|
||||
actual=self.notifier.versioned_notifications[1])
|
||||
|
||||
def test_instance_poll_volume_usage(self):
|
||||
server = self._setup_server_with_volume_attached()
|
||||
|
||||
self.compute.manager._poll_volume_usage(context.get_admin_context())
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self._verify_notification(
|
||||
'volume-usage',
|
||||
replacements={'instance_uuid': server['id']},
|
||||
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
|
||||
actual=self.notifier.versioned_notifications[0])
|
||||
|
|
|
@ -91,8 +91,8 @@ class FailedEvacuateStateTests(test.TestCase,
|
|||
host=self.hostname, binary='nova-compute')[0]['id']
|
||||
self.api.put_service(compute_id, {'forced_down': 'true'})
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
fake_notifier.reset()
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
# Initiate evacuation
|
||||
self._evacuate_server(
|
||||
|
|
|
@ -16,7 +16,6 @@ from nova.tests import fixtures as nova_fixtures
|
|||
from nova.tests.functional import fixtures as func_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_network
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestParallelEvacuationWithServerGroup(
|
||||
|
@ -47,8 +46,8 @@ class TestParallelEvacuationWithServerGroup(
|
|||
# 2.14 is needed for evacuate without onSharedStorage flag
|
||||
self.api.microversion = '2.14'
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
# the image fake backend needed for image discovery
|
||||
self.useFixture(nova_fixtures.GlanceFixture(self))
|
||||
|
@ -77,7 +76,7 @@ class TestParallelEvacuationWithServerGroup(
|
|||
# validation
|
||||
if instance.host == 'host1':
|
||||
# wait for the other instance rebuild to start
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.rebuild.start', n_events=1)
|
||||
|
||||
original_rebuild(self_, context, instance, *args, **kwargs)
|
||||
|
@ -127,7 +126,7 @@ class TestParallelEvacuationWithServerGroup(
|
|||
# NOTE(mdbooth): We only get 1 rebuild.start notification here because
|
||||
# we validate server group policy (and therefore fail) before emitting
|
||||
# rebuild.start.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.rebuild.start', n_events=1)
|
||||
server1 = self._wait_for_server_parameter(
|
||||
server1, {'OS-EXT-STS:task_state': None})
|
||||
|
|
|
@ -16,7 +16,6 @@ from nova.tests import fixtures as nova_fixtures
|
|||
from nova.tests.functional import fixtures as func_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_network
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class TestEvacuationWithSourceReturningDuringRebuild(
|
||||
|
@ -63,8 +62,6 @@ class TestEvacuationWithSourceReturningDuringRebuild(
|
|||
self.image_id = self.api.get_images()[0]['id']
|
||||
self.flavor_id = self.api.get_flavors()[0]['id']
|
||||
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
|
||||
# Stub out rebuild with a slower method allowing the src compute to be
|
||||
# restarted once the migration hits pre-migrating after claiming
|
||||
# resources on the dest.
|
||||
|
|
|
@ -20,7 +20,6 @@ from nova import test
|
|||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional import fixtures as func_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class RescheduleBuildAvailabilityZoneUpCall(
|
||||
|
@ -51,8 +50,8 @@ class RescheduleBuildAvailabilityZoneUpCall(
|
|||
self.start_service('compute', host='host1')
|
||||
self.start_service('compute', host='host2')
|
||||
# Listen for notifications.
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
def test_server_create_reschedule_blocked_az_up_call(self):
|
||||
self.flags(default_availability_zone='us-central')
|
||||
|
@ -80,7 +79,7 @@ class RescheduleBuildAvailabilityZoneUpCall(
|
|||
# Because we poisoned AggregateList.get_by_host after hitting the
|
||||
# compute service we have to wait for the notification that the build
|
||||
# is complete and then stop the mock so we can use the API again.
|
||||
fake_notifier.wait_for_versioned_notifications('instance.create.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.create.end')
|
||||
# Note that we use stopall here because we actually called
|
||||
# build_and_run_instance twice so we have more than one instance of
|
||||
# the mock that needs to be stopped.
|
||||
|
@ -116,8 +115,8 @@ class RescheduleMigrateAvailabilityZoneUpCall(
|
|||
self.start_service('compute', host='host2')
|
||||
self.start_service('compute', host='host3')
|
||||
# Listen for notifications.
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
def test_migrate_reschedule_blocked_az_up_call(self):
|
||||
self.flags(default_availability_zone='us-central')
|
||||
|
@ -153,7 +152,7 @@ class RescheduleMigrateAvailabilityZoneUpCall(
|
|||
# Because we poisoned AggregateList.get_by_host after hitting the
|
||||
# compute service we have to wait for the notification that the resize
|
||||
# is complete and then stop the mock so we can use the API again.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.resize_finish.end')
|
||||
# Note that we use stopall here because we actually called _prep_resize
|
||||
# twice so we have more than one instance of the mock that needs to be
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
from nova import test
|
||||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class ShowErrorServerWithTags(test.TestCase,
|
||||
|
@ -41,8 +40,6 @@ class ShowErrorServerWithTags(test.TestCase,
|
|||
|
||||
self.api.microversion = 'latest'
|
||||
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
|
||||
def _create_error_server(self):
|
||||
server = self.api.post_server({
|
||||
'server': {
|
||||
|
|
|
@ -14,7 +14,6 @@ from nova import test
|
|||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional import fixtures as func_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class RegressionTest1835822(
|
||||
|
@ -39,8 +38,8 @@ class RegressionTest1835822(
|
|||
self.image_ref_0 = images[0]['id']
|
||||
self.image_ref_1 = images[1]['id']
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
def _create_active_server(self, server_args=None):
|
||||
basic_server = {
|
||||
|
|
|
@ -15,7 +15,6 @@ import nova.compute
|
|||
from nova import exception
|
||||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class PinnedComputeRpcTests(integrated_helpers.ProviderUsageBaseTestCase):
|
||||
|
@ -28,8 +27,6 @@ class PinnedComputeRpcTests(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
self.useFixture(nova_fixtures.HostNameWeigherFixture())
|
||||
|
||||
super(PinnedComputeRpcTests, self).setUp()
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
|
||||
self.compute1 = self._start_compute(host='host1')
|
||||
self.compute2 = self._start_compute(host='host2')
|
||||
|
|
|
@ -15,7 +15,6 @@
|
|||
from nova import context
|
||||
from nova import objects
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class RebuildWithKeypairTestCase(integrated_helpers._IntegratedTestBase):
|
||||
|
@ -59,7 +58,7 @@ class RebuildWithKeypairTestCase(integrated_helpers._IntegratedTestBase):
|
|||
},
|
||||
}
|
||||
self.api.api_post('servers/%s/action' % server['id'], body)
|
||||
fake_notifier.wait_for_versioned_notifications('instance.rebuild.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.rebuild.end')
|
||||
self._wait_for_state_change(server, 'ACTIVE')
|
||||
|
||||
# Check keypairs changed
|
||||
|
|
|
@ -16,7 +16,6 @@ from nova import test
|
|||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional import fixtures as func_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class UnshelveNeutronErrorTest(
|
||||
|
@ -34,8 +33,8 @@ class UnshelveNeutronErrorTest(
|
|||
self.api = self.useFixture(nova_fixtures.OSAPIFixture(
|
||||
api_version='v2.1')).admin_api
|
||||
self.api.microversion = 'latest'
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
self.start_service('conductor')
|
||||
self.start_service('scheduler')
|
||||
|
@ -73,7 +72,7 @@ class UnshelveNeutronErrorTest(
|
|||
reason='test')
|
||||
req = {'unshelve': None}
|
||||
self.api.post_server_action(server['id'], req)
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.unshelve.start')
|
||||
self._wait_for_server_parameter(
|
||||
server,
|
||||
|
|
|
@ -17,7 +17,6 @@ from nova import context as nova_context
|
|||
from nova import objects
|
||||
from nova import test
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class ColdMigrationDisallowSameHost(
|
||||
|
@ -34,7 +33,7 @@ class ColdMigrationDisallowSameHost(
|
|||
self._start_compute('host1')
|
||||
|
||||
def _wait_for_migrate_no_valid_host(self, error='NoValidHost'):
|
||||
event = fake_notifier.wait_for_versioned_notifications(
|
||||
event = self.notifier.wait_for_versioned_notifications(
|
||||
'compute_task.migrate_server.error')[0]
|
||||
self.assertEqual(error,
|
||||
event['payload']['nova_object.data']['reason'][
|
||||
|
|
|
@ -31,7 +31,6 @@ from nova.scheduler import weights
|
|||
from nova import test
|
||||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova import utils
|
||||
|
||||
CONF = conf.CONF
|
||||
|
@ -421,7 +420,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
body = {'volumeAttachment': {'volumeId': volume_id}}
|
||||
self.api.api_post(
|
||||
'/servers/%s/os-volume_attachments' % server_id, body)
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.volume_attach.end')
|
||||
|
||||
def _detach_volume_from_server(self, server_id, volume_id):
|
||||
|
@ -430,7 +429,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
"""
|
||||
self.api.api_delete(
|
||||
'/servers/%s/os-volume_attachments/%s' % (server_id, volume_id))
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.volume_detach.end')
|
||||
|
||||
def assert_volume_is_attached(self, server_id, volume_id):
|
||||
|
@ -453,13 +452,13 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
# We should have gotten only two notifications:
|
||||
# 1. instance.resize_confirm.start
|
||||
# 2. instance.resize_confirm.end
|
||||
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS),
|
||||
self.assertEqual(2, len(self.notifier.versioned_notifications),
|
||||
'Unexpected number of versioned notifications for '
|
||||
'cross-cell resize confirm: %s' %
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS)
|
||||
start = fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type']
|
||||
self.notifier.versioned_notifications)
|
||||
start = self.notifier.versioned_notifications[0]['event_type']
|
||||
self.assertEqual('instance.resize_confirm.start', start)
|
||||
end = fake_notifier.VERSIONED_NOTIFICATIONS[1]['event_type']
|
||||
end = self.notifier.versioned_notifications[1]['event_type']
|
||||
self.assertEqual('instance.resize_confirm.end', end)
|
||||
|
||||
def delete_server_and_assert_cleanup(self, server,
|
||||
|
@ -538,7 +537,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
self._attach_volume_to_server(server['id'], uuids.fake_volume_id)
|
||||
|
||||
# Reset the fake notifier so we only check confirmation notifications.
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
# Confirm the resize and check all the things. The instance and its
|
||||
# related records should be gone from the source cell database; the
|
||||
|
@ -634,15 +633,15 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
# 1. instance.resize_revert.start (from target compute host)
|
||||
# 2. instance.exists (from target compute host)
|
||||
# 3. instance.resize_revert.end (from source compute host)
|
||||
self.assertEqual(3, len(fake_notifier.VERSIONED_NOTIFICATIONS),
|
||||
self.assertEqual(3, len(self.notifier.versioned_notifications),
|
||||
'Unexpected number of versioned notifications for '
|
||||
'cross-cell resize revert: %s' %
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS)
|
||||
start = fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type']
|
||||
self.notifier.versioned_notifications)
|
||||
start = self.notifier.versioned_notifications[0]['event_type']
|
||||
self.assertEqual('instance.resize_revert.start', start)
|
||||
exists = fake_notifier.VERSIONED_NOTIFICATIONS[1]['event_type']
|
||||
exists = self.notifier.versioned_notifications[1]['event_type']
|
||||
self.assertEqual('instance.exists', exists)
|
||||
end = fake_notifier.VERSIONED_NOTIFICATIONS[2]['event_type']
|
||||
end = self.notifier.versioned_notifications[2]['event_type']
|
||||
self.assertEqual('instance.resize_revert.end', end)
|
||||
|
||||
def assert_resize_revert_actions(self, server, source_host, dest_host):
|
||||
|
@ -703,7 +702,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
self._attach_volume_to_server(server['id'], uuids.fake_volume_id)
|
||||
|
||||
# Reset the fake notifier so we only check revert notifications.
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
|
||||
# Revert the resize. The server should be re-spawned in the source
|
||||
# cell and removed from the target cell. The allocations
|
||||
|
@ -718,7 +717,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
# instance.resize_revert.end notification because the migration.status
|
||||
# is changed to "reverted" *after* the instance status is changed to
|
||||
# ACTIVE.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.resize_revert.end')
|
||||
migrations = self.api.api_get(
|
||||
'/os-migrations?instance_uuid=%s' % server['id']
|
||||
|
@ -799,7 +798,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
self.api.post_server_action(server['id'], {'revertResize': None})
|
||||
server = self._wait_for_state_change(server, 'ACTIVE')
|
||||
self._wait_for_migration_status(server, ['reverted'])
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.resize_revert.end')
|
||||
self.assert_volume_is_detached(server['id'], uuids.fake_volume_id)
|
||||
# Delete the server and make sure we did not leak anything.
|
||||
|
@ -980,7 +979,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
# _poll_unconfirmed_resizes periodic task and run it on the target
|
||||
# compute service.
|
||||
# Reset the fake notifier so we only check confirmation notifications.
|
||||
fake_notifier.reset()
|
||||
self.notifier.reset()
|
||||
self.flags(resize_confirm_window=1)
|
||||
# Stub timeutils so the DB API query finds the unconfirmed migration.
|
||||
future = timeutils.utcnow() + datetime.timedelta(hours=1)
|
||||
|
@ -1071,7 +1070,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
self.assertEqual(4, server['OS-EXT-STS:power_state'],
|
||||
"Unexpected power state after revertResize.")
|
||||
self._wait_for_migration_status(server, ['reverted'])
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.resize_revert.end')
|
||||
|
||||
# Now try cold-migrating to cell2 to make sure there is no
|
||||
|
@ -1146,7 +1145,7 @@ class TestMultiCellMigrate(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
# to wait for something which happens after that, which is the
|
||||
# ComputeTaskManager._cold_migrate method sending the
|
||||
# compute_task.migrate_server.error event.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'compute_task.migrate_server.error')
|
||||
mig_uuid = self.get_migration_uuid_for_instance(server['id'])
|
||||
mig_allocs = self._get_allocations_by_server_uuid(mig_uuid)
|
||||
|
|
|
@ -14,7 +14,6 @@ from nova.compute import instance_actions
|
|||
from nova.compute import power_state
|
||||
from nova.compute import vm_states
|
||||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
class ServerExternalEventsTestV276(
|
||||
|
@ -64,9 +63,9 @@ class ServerExternalEventsTestV276(
|
|||
self.assertIn('compute_power_update', events_by_name)
|
||||
self.assertEqual('Success', detail['events'][0]['result'])
|
||||
# Test if notifications were emitted.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.power_off.start')
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.power_off.end')
|
||||
|
||||
# Checking POWER_ON
|
||||
|
@ -90,7 +89,7 @@ class ServerExternalEventsTestV276(
|
|||
self.assertIn('compute_power_update', events_by_name)
|
||||
self.assertEqual('Success', detail['events'][0]['result'])
|
||||
# Test if notifications were emitted.
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.power_on.start')
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.power_on.end')
|
||||
|
|
|
@ -49,7 +49,6 @@ from nova.tests.functional.api import client
|
|||
from nova.tests.functional import integrated_helpers
|
||||
from nova.tests.unit.api.openstack import fakes
|
||||
from nova.tests.unit import fake_block_device
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova.tests.unit import fake_requests
|
||||
from nova.tests.unit.objects import test_instance_info_cache
|
||||
from nova import utils as nova_utils
|
||||
|
@ -1629,8 +1628,6 @@ class ServerMovingTests(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
|
||||
def setUp(self):
|
||||
super(ServerMovingTests, self).setUp()
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
|
||||
self.compute1 = self._start_compute(host='host1')
|
||||
self.compute2 = self._start_compute(host='host2')
|
||||
|
@ -2036,7 +2033,7 @@ class ServerMovingTests(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
server, expected_state='ERROR', expected_host=source_hostname,
|
||||
expected_migration_status='error')
|
||||
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'compute_task.rebuild_server.error')
|
||||
self._run_periodics()
|
||||
|
||||
|
@ -3999,11 +3996,9 @@ class VolumeBackedServerTest(integrated_helpers.ProviderUsageBaseTestCase):
|
|||
|
||||
# Now shelve and unshelve the server to make sure root_gb DISK_GB
|
||||
# isn't reported for allocations after we unshelve the server.
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.api.post_server_action(server['id'], {'shelve': None})
|
||||
self._wait_for_state_change(server, 'SHELVED_OFFLOADED')
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.shelve_offload.end')
|
||||
# The server should not have any allocations since it's not currently
|
||||
# hosted on any compute service.
|
||||
|
@ -4783,7 +4778,7 @@ class ConsumerGenerationConflictTest(
|
|||
(migrations[0]['uuid'], server['id']),
|
||||
self.stdlog.logger.output)
|
||||
self._delete_server(server)
|
||||
fake_notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
|
||||
allocations = self._get_allocations_by_server_uuid(
|
||||
migrations[0]['uuid'])
|
||||
|
@ -4831,7 +4826,7 @@ class ConsumerGenerationConflictTest(
|
|||
(migrations[0]['uuid'], server['id']),
|
||||
self.stdlog.logger.output)
|
||||
self._delete_server(server)
|
||||
fake_notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
|
||||
allocations = self._get_allocations_by_server_uuid(
|
||||
migrations[0]['uuid'])
|
||||
|
@ -4882,7 +4877,7 @@ class ConsumerGenerationConflictTest(
|
|||
(migrations[0]['uuid'], server['id']),
|
||||
self.stdlog.logger.output)
|
||||
self._delete_server(server)
|
||||
fake_notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
|
||||
allocations = self._get_allocations_by_server_uuid(
|
||||
migrations[0]['uuid'])
|
||||
|
@ -4947,9 +4942,6 @@ class ConsumerGenerationConflictTest(
|
|||
server = self._boot_and_check_allocations(
|
||||
self.flavor, source_hostname)
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
|
||||
orig_put = adapter.Adapter.put
|
||||
|
||||
rsp = fake_requests.FakeResponse(
|
||||
|
@ -4991,7 +4983,7 @@ class ConsumerGenerationConflictTest(
|
|||
{'OS-EXT-SRV-ATTR:host': dest_hostname,
|
||||
'status': 'ERROR'})
|
||||
self._wait_for_migration_status(server, ['error'])
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.live_migration_post.end')
|
||||
|
||||
# 1 claim on destination, 1 normal delete on dest that fails,
|
||||
|
@ -5018,7 +5010,7 @@ class ConsumerGenerationConflictTest(
|
|||
self.stdlog.logger.output)
|
||||
|
||||
self._delete_server(server)
|
||||
fake_notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.delete.end')
|
||||
|
||||
self.assertFlavorMatchesAllocation(self.flavor, migration_uuid,
|
||||
source_rp_uuid)
|
||||
|
@ -6462,7 +6454,7 @@ class PortResourceRequestBasedSchedulingTest(
|
|||
self.api.detach_interface(
|
||||
server['id'], self.neutron.port_with_resource_request['id'])
|
||||
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.interface_detach.end')
|
||||
|
||||
updated_port = self.neutron.show_port(
|
||||
|
@ -7018,9 +7010,9 @@ class ServerMoveWithPortResourceRequestTest(
|
|||
self._wait_for_action_fail_completion(
|
||||
server, instance_actions.MIGRATE, 'compute_prep_resize')
|
||||
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.resize_prep.end')
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'compute.exception')
|
||||
|
||||
migration_uuid = self.get_migration_uuid_for_instance(server['id'])
|
||||
|
@ -7275,9 +7267,9 @@ class ServerMoveWithPortResourceRequestTest(
|
|||
self._wait_for_action_fail_completion(
|
||||
server, instance_actions.EVACUATE, 'compute_rebuild_instance')
|
||||
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.rebuild.error')
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'compute.exception')
|
||||
|
||||
# and the instance allocates from the source host
|
||||
|
@ -7562,9 +7554,9 @@ class ServerMoveWithPortResourceRequestTest(
|
|||
# Unshelve fails on host2 due to
|
||||
# update_pci_request_spec_with_allocated_interface_name fails so the
|
||||
# instance goes back to shelve offloaded state
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.unshelve.start')
|
||||
error_notification = fake_notifier.wait_for_versioned_notifications(
|
||||
error_notification = self.notifier.wait_for_versioned_notifications(
|
||||
'compute.exception')[0]
|
||||
self.assertEqual(
|
||||
'UnexpectedResourceProviderNameForPCIRequest',
|
||||
|
@ -7610,7 +7602,7 @@ class ServerMoveWithPortResourceRequestTest(
|
|||
reason='test')
|
||||
req = {'unshelve': None}
|
||||
self.api.post_server_action(server['id'], req)
|
||||
fake_notifier.wait_for_versioned_notifications(
|
||||
self.notifier.wait_for_versioned_notifications(
|
||||
'instance.unshelve.start')
|
||||
self._wait_for_server_parameter(
|
||||
server,
|
||||
|
@ -8188,7 +8180,7 @@ class AcceleratorServerOpsTest(AcceleratorServerBase):
|
|||
{'rebuild': {
|
||||
'imageRef': rebuild_image_ref,
|
||||
'OS-DCF:diskConfig': 'AUTO'}})
|
||||
fake_notifier.wait_for_versioned_notifications('instance.rebuild.end')
|
||||
self.notifier.wait_for_versioned_notifications('instance.rebuild.end')
|
||||
self._wait_for_state_change(self.server, 'ACTIVE')
|
||||
self._check_allocations_usage(self.server)
|
||||
|
||||
|
|
|
@ -149,8 +149,7 @@ class BaseTestCase(test.TestCase):
|
|||
def setUp(self):
|
||||
super(BaseTestCase, self).setUp()
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.useFixture(fixtures.NotificationFixture(self))
|
||||
|
||||
self.compute = compute_manager.ComputeManager()
|
||||
# NOTE(gibi): this is a hack to make the fake virt driver use the nodes
|
||||
|
|
|
@ -8284,8 +8284,7 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
|
||||
def setUp(self):
|
||||
super(ComputeManagerMigrationTestCase, self).setUp()
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(fixtures.NotificationFixture(self))
|
||||
self.flags(compute_driver='fake.SameHostColdMigrateDriver')
|
||||
self.compute = manager.ComputeManager()
|
||||
self.context = context.RequestContext(fakes.FAKE_USER_ID,
|
||||
|
@ -10899,10 +10898,10 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
# There would really be three notifications but because we mocked out
|
||||
# _send_prep_resize_notifications there is just the one error
|
||||
# notification from the wrap_exception decorator.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'compute.%s' % fields.NotificationAction.EXCEPTION,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
|
||||
@mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
|
||||
'get_allocs_for_consumer')
|
||||
|
@ -10948,10 +10947,10 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
# There would really be three notifications but because we mocked out
|
||||
# _send_prep_resize_notifications there is just the one error
|
||||
# notification from the wrap_exception decorator.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'compute.%s' % fields.NotificationAction.EXCEPTION,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
|
||||
def test_snapshot_for_resize(self):
|
||||
"""Happy path test for _snapshot_for_resize."""
|
||||
|
@ -11139,10 +11138,10 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
add_fault.assert_called_once_with(
|
||||
self.context, self.instance, wrapped_exc, mock.ANY)
|
||||
# Assert wrap_exception is called.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'compute.%s' % fields.NotificationAction.EXCEPTION,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
# Assert errors_out_migration is called.
|
||||
self.assertEqual('error', self.migration.status)
|
||||
self.migration.save.assert_called_once_with()
|
||||
|
@ -11174,10 +11173,10 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
add_fault.assert_called_once_with(
|
||||
self.context, self.instance, ex, mock.ANY)
|
||||
# Assert wrap_exception is called.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'compute.%s' % fields.NotificationAction.EXCEPTION,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
# Assert errors_out_migration is called.
|
||||
self.assertEqual('error', self.migration.status)
|
||||
self.migration.save.assert_called_once_with()
|
||||
|
@ -11515,10 +11514,10 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
self.assertEqual('error', self.migration.status)
|
||||
self.migration.save.assert_called_once_with()
|
||||
# Assert wrap_exception is called.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'compute.%s' % fields.NotificationAction.EXCEPTION,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
|
||||
@mock.patch('nova.objects.Instance.save')
|
||||
@mock.patch('nova.compute.manager.ComputeManager.'
|
||||
|
@ -11553,10 +11552,10 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
self.assertEqual('error', self.migration.status)
|
||||
self.migration.save.assert_called_once_with()
|
||||
# Assert wrap_exception is called.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'compute.%s' % fields.NotificationAction.EXCEPTION,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
|
||||
@mock.patch('nova.objects.Instance.get_bdms')
|
||||
@mock.patch('nova.compute.manager.ComputeManager.'
|
||||
|
@ -11712,10 +11711,10 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
self.assertEqual('error', self.migration.status)
|
||||
self.migration.save.assert_called_once_with()
|
||||
# Assert wrap_exception is called.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'compute.%s' % fields.NotificationAction.EXCEPTION,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
# clear_events_for_instance should not have been called.
|
||||
mock_clear_events.assert_not_called()
|
||||
|
||||
|
@ -11847,10 +11846,10 @@ class ComputeManagerMigrationTestCase(test.NoDBTestCase,
|
|||
mock_add_fault.assert_called_once_with(
|
||||
self.context, self.instance, error, test.MatchType(tuple))
|
||||
# wrap_exception should have sent an error notification.
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'compute.%s' % fields.NotificationAction.EXCEPTION,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
|
||||
# Now run it again but _finish_revert_snapshot_based_resize_at_source
|
||||
# will pass and _update_scheduler_instance_info will fail but not be
|
||||
|
|
|
@ -43,6 +43,7 @@ from nova.objects import fields
|
|||
from nova import rpc
|
||||
from nova.scheduler.client import report
|
||||
from nova import test
|
||||
from nova.tests import fixtures
|
||||
from nova.tests.unit import fake_block_device
|
||||
from nova.tests.unit import fake_crypto
|
||||
from nova.tests.unit import fake_instance
|
||||
|
@ -360,8 +361,7 @@ class UsageInfoTestCase(test.TestCase):
|
|||
|
||||
super(UsageInfoTestCase, self).setUp()
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(fixtures.NotificationFixture(self))
|
||||
|
||||
self.flags(compute_driver='fake.FakeDriver')
|
||||
self.compute = manager.ComputeManager()
|
||||
|
@ -417,8 +417,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
|
||||
compute_utils.notify_usage_exists(
|
||||
rpc.get_notifier('compute'), self.context, instance, 'fake-host')
|
||||
self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
|
||||
msg = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(len(self.notifier.versioned_notifications), 1)
|
||||
msg = self.notifier.versioned_notifications[0]
|
||||
self.assertEqual(msg['priority'], 'INFO')
|
||||
self.assertEqual(msg['event_type'], 'instance.exists')
|
||||
payload = msg['payload']['nova_object.data']
|
||||
|
@ -493,8 +493,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
phase='start',
|
||||
bdms=bdms)
|
||||
|
||||
self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(len(self.notifier.versioned_notifications), 1)
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual(notification['priority'], 'INFO')
|
||||
self.assertEqual(notification['event_type'], 'instance.delete.start')
|
||||
|
@ -539,8 +539,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
host='fake-compute',
|
||||
phase='start')
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual('INFO', notification['priority'])
|
||||
self.assertEqual('instance.create.start', notification['event_type'])
|
||||
|
@ -582,8 +582,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
host='fake-compute',
|
||||
phase='start')
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual('INFO', notification['priority'])
|
||||
self.assertEqual('instance.create.start', notification['event_type'])
|
||||
|
@ -621,8 +621,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
host='fake-compute',
|
||||
phase='start')
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual('INFO', notification['priority'])
|
||||
self.assertEqual('instance.create.start', notification['event_type'])
|
||||
|
@ -656,8 +656,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
fields.NotificationPhase.START,
|
||||
uuids.old_volume_id, uuids.new_volume_id)
|
||||
|
||||
self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(len(self.notifier.versioned_notifications), 1)
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual('INFO', notification['priority'])
|
||||
self.assertEqual('instance.%s.%s' %
|
||||
|
@ -697,8 +697,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
fields.NotificationPhase.ERROR,
|
||||
uuids.old_volume_id, uuids.new_volume_id, ex)
|
||||
|
||||
self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(len(self.notifier.versioned_notifications), 1)
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual('ERROR', notification['priority'])
|
||||
self.assertEqual('instance.%s.%s' %
|
||||
|
@ -748,8 +748,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
uuids.rescue_image_ref,
|
||||
phase='start')
|
||||
|
||||
self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(len(self.notifier.versioned_notifications), 1)
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual(notification['priority'], 'INFO')
|
||||
self.assertEqual(notification['event_type'], 'instance.rescue.start')
|
||||
|
@ -781,8 +781,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
compute_utils.notify_about_resize_prep_instance(
|
||||
self.context, instance, 'fake-compute', 'start', new_flavor)
|
||||
|
||||
self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(len(self.notifier.versioned_notifications), 1)
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual(notification['priority'], 'INFO')
|
||||
self.assertEqual(notification['event_type'],
|
||||
|
@ -854,8 +854,8 @@ class UsageInfoTestCase(test.TestCase):
|
|||
compute_utils.notify_about_volume_usage(self.context, vol_usage,
|
||||
'fake-compute')
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
|
||||
self.assertEqual('INFO', notification['priority'])
|
||||
self.assertEqual('volume.usage', notification['event_type'])
|
||||
|
@ -1235,8 +1235,7 @@ class ComputeUtilsTestCase(test.NoDBTestCase):
|
|||
class ServerGroupTestCase(test.TestCase):
|
||||
def setUp(self):
|
||||
super(ServerGroupTestCase, self).setUp()
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(fixtures.NotificationFixture(self))
|
||||
self.user_id = 'fake'
|
||||
self.project_id = 'fake'
|
||||
self.context = context.RequestContext(self.user_id, self.project_id)
|
||||
|
@ -1253,8 +1252,8 @@ class ServerGroupTestCase(test.TestCase):
|
|||
def test_notify_about_server_group_action(self):
|
||||
compute_utils.notify_about_server_group_action(self.context,
|
||||
self.group, 'create')
|
||||
self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(len(self.notifier.versioned_notifications), 1)
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
expected = {'priority': 'INFO',
|
||||
'event_type': u'server_group.create',
|
||||
'publisher_id': u'nova-api:fake-mini',
|
||||
|
@ -1285,8 +1284,8 @@ class ServerGroupTestCase(test.TestCase):
|
|||
self.context, uuids.server_group)
|
||||
mock_get_by_uuid.assert_called_once_with(self.context,
|
||||
uuids.server_group)
|
||||
self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(len(self.notifier.versioned_notifications), 1)
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
expected = {'priority': 'INFO',
|
||||
'event_type': u'server_group.add_member',
|
||||
'publisher_id': u'nova-api:fake-mini',
|
||||
|
|
|
@ -38,8 +38,7 @@ class ComputeHostAPITestCase(test.TestCase):
|
|||
self.host_api = compute.HostAPI()
|
||||
self.aggregate_api = compute.AggregateAPI()
|
||||
self.ctxt = context.get_admin_context()
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.useFixture(nova_fixtures.NotificationFixture(self))
|
||||
self.req = fakes.HTTPRequest.blank('')
|
||||
self.controller = services.ServiceController()
|
||||
self.useFixture(nova_fixtures.SingleCellSimple())
|
||||
|
|
|
@ -39,6 +39,7 @@ from nova.objects import pci_device
|
|||
from nova.pci import manager as pci_manager
|
||||
from nova.scheduler.client import report
|
||||
from nova import test
|
||||
from nova.tests import fixtures
|
||||
from nova.tests.unit import fake_instance
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova.tests.unit.objects import test_pci_device as fake_pci_device
|
||||
|
@ -3796,8 +3797,7 @@ class ComputeMonitorTestCase(BaseTestCase):
|
|||
|
||||
@mock.patch('nova.compute.utils.notify_about_metrics_update')
|
||||
def test_get_host_metrics(self, mock_notify):
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.useFixture(fixtures.NotificationFixture(self))
|
||||
|
||||
class FakeCPUMonitor(monitor_base.MonitorBase):
|
||||
|
||||
|
|
|
@ -138,8 +138,7 @@ class _BaseTestCase(object):
|
|||
self.project_id = fakes.FAKE_PROJECT_ID
|
||||
self.context = FakeContext(self.user_id, self.project_id)
|
||||
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.useFixture(fixtures.NotificationFixture(self))
|
||||
|
||||
self.stub_out('nova.rpc.RequestContextSerializer.deserialize_context',
|
||||
lambda *args, **kwargs: self.context)
|
||||
|
|
|
@ -34,7 +34,6 @@ from nova.objects import fields
|
|||
from nova.objects import virt_device_metadata
|
||||
from nova import test
|
||||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.unit import fake_notifier
|
||||
from nova import utils
|
||||
|
||||
|
||||
|
@ -211,8 +210,7 @@ class _BaseTestCase(test.TestCase):
|
|||
self.user_id = 'fake-user'
|
||||
self.project_id = 'fake-project'
|
||||
self.context = context.RequestContext(self.user_id, self.project_id)
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.useFixture(nova_fixtures.NotificationFixture(self))
|
||||
|
||||
# NOTE(danms): register these here instead of at import time
|
||||
# so that they're not always present
|
||||
|
|
|
@ -19,6 +19,7 @@ import mock
|
|||
from nova import context as nova_context
|
||||
from nova import exception_wrapper
|
||||
from nova import test
|
||||
from nova.tests import fixtures as nova_fixtures
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
||||
|
@ -52,8 +53,8 @@ def good_function(self, context):
|
|||
class WrapExceptionTestCase(test.NoDBTestCase):
|
||||
def setUp(self):
|
||||
super(WrapExceptionTestCase, self).setUp()
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(
|
||||
nova_fixtures.NotificationFixture(self))
|
||||
|
||||
def test_cleanse_dict(self):
|
||||
kwargs = {'foo': 1, 'blah_pass': 2, 'zoo_password': 3, '_pass': 4}
|
||||
|
@ -67,7 +68,7 @@ class WrapExceptionTestCase(test.NoDBTestCase):
|
|||
service='compute', binary='nova-compute')
|
||||
self.assertEqual(99, wrapped(good_function)(1, 2))
|
||||
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(0, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(0, len(self.notifier.versioned_notifications))
|
||||
|
||||
def test_wrap_exception_unknown_module(self):
|
||||
ctxt = nova_context.get_admin_context()
|
||||
|
@ -75,8 +76,8 @@ class WrapExceptionTestCase(test.NoDBTestCase):
|
|||
service='compute', binary='nova-compute')
|
||||
self.assertRaises(
|
||||
TypeError, wrapped(bad_function_unknown_module), None, ctxt)
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
payload = notification['payload']['nova_object.data']
|
||||
self.assertEqual('unknown', payload['module_name'])
|
||||
|
||||
|
@ -96,8 +97,8 @@ class WrapExceptionTestCase(test.NoDBTestCase):
|
|||
self.assertIn(key, notification.payload.keys())
|
||||
self.assertNotIn('context', notification.payload['args'].keys())
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
notification = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
notification = self.notifier.versioned_notifications[0]
|
||||
self.assertEqual('compute.exception', notification['event_type'])
|
||||
self.assertEqual('nova-compute:fake-mini',
|
||||
notification['publisher_id'])
|
||||
|
|
|
@ -32,6 +32,7 @@ from nova.notifications import base as notifications
|
|||
from nova import objects
|
||||
from nova.objects import base as obj_base
|
||||
from nova import test
|
||||
from nova.tests import fixtures
|
||||
from nova.tests.unit import fake_network
|
||||
from nova.tests.unit import fake_notifier
|
||||
|
||||
|
@ -46,8 +47,7 @@ class NotificationsTestCase(test.TestCase):
|
|||
self.fixture = self.useFixture(o_fixture.ClearRequestContext())
|
||||
|
||||
self.net_info = fake_network.fake_get_instance_nw_info(self)
|
||||
fake_notifier.stub_notifier(self)
|
||||
self.addCleanup(fake_notifier.reset)
|
||||
self.notifier = self.useFixture(fixtures.NotificationFixture(self))
|
||||
|
||||
self.flags(host='testhost')
|
||||
self.flags(notify_on_state_change="vm_and_task_state",
|
||||
|
@ -106,7 +106,7 @@ class NotificationsTestCase(test.TestCase):
|
|||
|
||||
notifications.send_update(self.context, old, self.instance)
|
||||
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(0, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(0, len(self.notifier.versioned_notifications))
|
||||
|
||||
def test_task_notif(self):
|
||||
|
||||
|
@ -127,7 +127,7 @@ class NotificationsTestCase(test.TestCase):
|
|||
verify_states=True)
|
||||
|
||||
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(0, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(0, len(self.notifier.versioned_notifications))
|
||||
|
||||
# ok now enable task state notifications and re-try
|
||||
self.flags(notify_on_state_change="vm_and_task_state",
|
||||
|
@ -135,11 +135,11 @@ class NotificationsTestCase(test.TestCase):
|
|||
|
||||
notifications.send_update(self.context, old, self.instance)
|
||||
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
|
||||
self.assertEqual(
|
||||
'instance.update',
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
|
||||
def test_send_no_notif(self):
|
||||
|
||||
|
@ -154,7 +154,7 @@ class NotificationsTestCase(test.TestCase):
|
|||
service="compute", host=None, verify_states=True)
|
||||
|
||||
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(0, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(0, len(self.notifier.versioned_notifications))
|
||||
|
||||
def test_send_on_vm_change(self):
|
||||
old = obj_base.obj_to_primitive(self.instance)
|
||||
|
@ -168,13 +168,13 @@ class NotificationsTestCase(test.TestCase):
|
|||
notif = fake_notifier.NOTIFICATIONS[0]
|
||||
self.assertEqual('compute.testhost', notif.publisher_id)
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'nova-compute:testhost',
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['publisher_id'])
|
||||
self.notifier.versioned_notifications[0]['publisher_id'])
|
||||
self.assertEqual(
|
||||
'instance.update',
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
|
||||
def test_send_on_task_change(self):
|
||||
|
||||
|
@ -185,10 +185,10 @@ class NotificationsTestCase(test.TestCase):
|
|||
notifications.send_update(self.context, old, self.instance)
|
||||
|
||||
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'instance.update',
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
|
||||
def test_no_update_with_states(self):
|
||||
|
||||
|
@ -196,7 +196,7 @@ class NotificationsTestCase(test.TestCase):
|
|||
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
|
||||
task_states.SPAWNING, verify_states=True)
|
||||
self.assertEqual(0, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(0, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(0, len(self.notifier.versioned_notifications))
|
||||
|
||||
def test_vm_update_with_states(self):
|
||||
fake_net_info = fake_network.fake_get_instance_nw_info(self)
|
||||
|
@ -211,10 +211,10 @@ class NotificationsTestCase(test.TestCase):
|
|||
def _verify_notification(self, expected_state=vm_states.ACTIVE,
|
||||
expected_new_task_state=task_states.SPAWNING):
|
||||
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
self.assertEqual(
|
||||
'instance.update',
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]['event_type'])
|
||||
self.notifier.versioned_notifications[0]['event_type'])
|
||||
access_ip_v4 = str(self.instance.access_ip_v4)
|
||||
access_ip_v6 = str(self.instance.access_ip_v6)
|
||||
display_name = self.instance.display_name
|
||||
|
@ -235,7 +235,7 @@ class NotificationsTestCase(test.TestCase):
|
|||
self.assertEqual("2017-02-02T16:45:00.000000",
|
||||
payload["audit_period_ending"])
|
||||
|
||||
payload = fake_notifier.VERSIONED_NOTIFICATIONS[0][
|
||||
payload = self.notifier.versioned_notifications[0][
|
||||
'payload']['nova_object.data']
|
||||
state_update = payload['state_update']['nova_object.data']
|
||||
self.assertEqual(vm_states.BUILDING, state_update['old_state'])
|
||||
|
@ -284,14 +284,14 @@ class NotificationsTestCase(test.TestCase):
|
|||
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
|
||||
None)
|
||||
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
|
||||
# service name should default to 'compute'
|
||||
notif = fake_notifier.NOTIFICATIONS[0]
|
||||
self.assertEqual('compute.testhost', notif.publisher_id)
|
||||
|
||||
# in the versioned notification it defaults to nova-compute
|
||||
notif = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
notif = self.notifier.versioned_notifications[0]
|
||||
self.assertEqual('nova-compute:testhost', notif['publisher_id'])
|
||||
|
||||
def test_update_with_service_name(self):
|
||||
|
@ -299,13 +299,13 @@ class NotificationsTestCase(test.TestCase):
|
|||
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
|
||||
None, service="nova-compute")
|
||||
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
|
||||
# service name should default to 'compute'
|
||||
notif = fake_notifier.NOTIFICATIONS[0]
|
||||
self.assertEqual('nova-compute.testhost', notif.publisher_id)
|
||||
|
||||
notif = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
notif = self.notifier.versioned_notifications[0]
|
||||
self.assertEqual('nova-compute:testhost', notif['publisher_id'])
|
||||
|
||||
def test_update_with_host_name(self):
|
||||
|
@ -313,13 +313,13 @@ class NotificationsTestCase(test.TestCase):
|
|||
vm_states.BUILDING, vm_states.BUILDING, task_states.SPAWNING,
|
||||
None, host="someotherhost")
|
||||
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
|
||||
# service name should default to 'compute'
|
||||
notif = fake_notifier.NOTIFICATIONS[0]
|
||||
self.assertEqual('compute.someotherhost', notif.publisher_id)
|
||||
|
||||
notif = fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
notif = self.notifier.versioned_notifications[0]
|
||||
self.assertEqual('nova-compute:someotherhost', notif['publisher_id'])
|
||||
|
||||
def test_payload_has_fixed_ip_labels(self):
|
||||
|
@ -411,14 +411,14 @@ class NotificationsTestCase(test.TestCase):
|
|||
new_name_inst = self._wrapped_create(params=param)
|
||||
notifications.send_update(self.context, self.instance, new_name_inst)
|
||||
self.assertEqual(1, len(fake_notifier.NOTIFICATIONS))
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
|
||||
old_display_name = self.instance.display_name
|
||||
new_display_name = new_name_inst.display_name
|
||||
|
||||
for payload in [
|
||||
fake_notifier.NOTIFICATIONS[0].payload,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0][
|
||||
self.notifier.versioned_notifications[0][
|
||||
'payload']['nova_object.data']]:
|
||||
|
||||
self.assertEqual(payload["old_display_name"], old_display_name)
|
||||
|
@ -428,24 +428,24 @@ class NotificationsTestCase(test.TestCase):
|
|||
objects.TagList.create(self.context,
|
||||
self.instance.uuid, [u'tag1', u'tag2'])
|
||||
notifications.send_update(self.context, self.instance, self.instance)
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
|
||||
self.assertEqual([u'tag1', u'tag2'],
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.notifier.versioned_notifications[0]
|
||||
['payload']['nova_object.data']['tags'])
|
||||
|
||||
def test_send_versioned_action_initiator_update(self):
|
||||
notifications.send_update(self.context, self.instance, self.instance)
|
||||
action_initiator_user = self.context.user_id
|
||||
action_initiator_project = self.context.project_id
|
||||
self.assertEqual(1, len(fake_notifier.VERSIONED_NOTIFICATIONS))
|
||||
self.assertEqual(1, len(self.notifier.versioned_notifications))
|
||||
|
||||
self.assertEqual(action_initiator_user,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.notifier.versioned_notifications[0]
|
||||
['payload']['nova_object.data']
|
||||
['action_initiator_user'])
|
||||
self.assertEqual(action_initiator_project,
|
||||
fake_notifier.VERSIONED_NOTIFICATIONS[0]
|
||||
self.notifier.versioned_notifications[0]
|
||||
['payload']['nova_object.data']
|
||||
['action_initiator_project'])
|
||||
|
||||
|
|
Loading…
Reference in New Issue