diff --git a/doc/notification_samples/common_payloads/AuditPeriodPayload.json b/doc/notification_samples/common_payloads/AuditPeriodPayload.json
new file mode 100644
index 000000000000..fd917d704b52
--- /dev/null
+++ b/doc/notification_samples/common_payloads/AuditPeriodPayload.json
@@ -0,0 +1,9 @@
+{
+    "nova_object.data": {
+        "audit_period_beginning": "2012-10-01T00:00:00Z",
+        "audit_period_ending": "2012-10-29T13:42:11Z"
+    },
+    "nova_object.name": "AuditPeriodPayload",
+    "nova_object.namespace": "nova",
+    "nova_object.version": "1.0"
+}
diff --git a/doc/notification_samples/common_payloads/BandwidthPayload.json b/doc/notification_samples/common_payloads/BandwidthPayload.json
new file mode 100644
index 000000000000..dd1733c464f7
--- /dev/null
+++ b/doc/notification_samples/common_payloads/BandwidthPayload.json
@@ -0,0 +1,10 @@
+{
+    "nova_object.data": {
+        "network_name": "private-network",
+        "out_bytes": 0,
+        "in_bytes": 0
+    },
+    "nova_object.name": "BandwidthPayload",
+    "nova_object.namespace": "nova",
+    "nova_object.version": "1.0"
+}
diff --git a/doc/notification_samples/common_payloads/InstanceExistsPayload.json b/doc/notification_samples/common_payloads/InstanceExistsPayload.json
new file mode 100644
index 000000000000..59f11c55da40
--- /dev/null
+++ b/doc/notification_samples/common_payloads/InstanceExistsPayload.json
@@ -0,0 +1,12 @@
+{
+    "$ref": "InstancePayload.json",
+    "nova_object.data":{
+        "audit_period": {"$ref": "AuditPeriodPayload.json#"},
+        "bandwidth": [
+            {"$ref": "BandwidthPayload.json#"}
+        ]
+    },
+    "nova_object.name":"InstanceExistsPayload",
+    "nova_object.namespace":"nova",
+    "nova_object.version":"1.0"
+}
diff --git a/doc/notification_samples/instance-exists.json b/doc/notification_samples/instance-exists.json
new file mode 100755
index 000000000000..0c30c3e536db
--- /dev/null
+++ b/doc/notification_samples/instance-exists.json
@@ -0,0 +1,13 @@
+{
+    "event_type":"instance.exists",
+    "payload":{
+        "$ref":"common_payloads/InstanceExistsPayload.json#",
+        "nova_object.data":{
+            "architecture":null,
+            "image_uuid":"a2459075-d96c-40d5-893e-577ff92e721c",
+            "task_state":"rebuilding"
+        }
+    },
+    "priority":"INFO",
+    "publisher_id":"nova-compute:compute"
+}
diff --git a/nova/compute/manager.py b/nova/compute/manager.py
index 609e88e6be89..55c59f5f2e2f 100644
--- a/nova/compute/manager.py
+++ b/nova/compute/manager.py
@@ -2993,7 +2993,7 @@ class ComputeManager(manager.Manager):
                 context)
             extra_usage_info = {'image_ref_url': orig_image_ref_url}
             compute_utils.notify_usage_exists(
-                self.notifier, context, instance,
+                self.notifier, context, instance, self.host,
                 current_period=True, system_metadata=orig_sys_metadata,
                 extra_usage_info=extra_usage_info)
 
@@ -3586,7 +3586,7 @@ class ComputeManager(manager.Manager):
                 reason=_("Driver Error: %s") % e)
 
         compute_utils.notify_usage_exists(self.notifier, context, instance,
-                                          current_period=True)
+                                          self.host, current_period=True)
 
         instance.vm_state = vm_states.RESCUED
         instance.task_state = None
@@ -3849,7 +3849,7 @@ class ComputeManager(manager.Manager):
         # NOTE(comstud): A revert_resize is essentially a resize back to
         # the old size, so we need to send a usage event here.
         compute_utils.notify_usage_exists(self.notifier, context, instance,
-                                          current_period=True)
+                                          self.host, current_period=True)
 
         with self._error_out_instance_on_exception(context, instance):
             # NOTE(tr3buchet): tear down networks on destination host
@@ -4107,7 +4107,7 @@ class ComputeManager(manager.Manager):
         with self._error_out_instance_on_exception(context, instance), \
              errors_out_migration_ctxt(migration):
             compute_utils.notify_usage_exists(self.notifier, context, instance,
-                                              current_period=True)
+                                              self.host, current_period=True)
             self._notify_about_instance_usage(
                     context, instance, "resize.prep.start")
             compute_utils.notify_about_resize_prep_instance(
@@ -4724,7 +4724,7 @@ class ComputeManager(manager.Manager):
         else:
             bdms = None
         compute_utils.notify_usage_exists(self.notifier, context, instance,
-                                          current_period=True)
+                                          self.host, current_period=True)
         self._notify_about_instance_usage(context, instance, 'shelve.start')
         compute_utils.notify_about_instance_action(context, instance,
                 self.host, action=fields.NotificationAction.SHELVE,
@@ -6891,7 +6891,7 @@ class ComputeManager(manager.Manager):
             for instance in instances:
                 try:
                     compute_utils.notify_usage_exists(
-                        self.notifier, context, instance,
+                        self.notifier, context, instance, self.host,
                         ignore_missing_network_data=False)
                     successes += 1
                 except Exception:
diff --git a/nova/compute/utils.py b/nova/compute/utils.py
index 0ad0231561ca..f12932fbe905 100644
--- a/nova/compute/utils.py
+++ b/nova/compute/utils.py
@@ -261,16 +261,17 @@ def get_value_from_system_metadata(instance, key, type, default):
     return default
 
 
-def notify_usage_exists(notifier, context, instance_ref, current_period=False,
-                        ignore_missing_network_data=True,
+def notify_usage_exists(notifier, context, instance_ref, host,
+                        current_period=False, ignore_missing_network_data=True,
                         system_metadata=None, extra_usage_info=None):
-    """Generates 'exists' unversioned legacy notification for an instance for
-    usage auditing purposes.
+    """Generates 'exists' unversioned legacy and transformed (versioned)
+    notifications for an instance for usage auditing purposes.
 
     :param notifier: a messaging.Notifier
     :param context: request context for the current operation
     :param instance_ref: nova.objects.Instance object from which to report
                          usage
+    :param host: the host emitting the notification
     :param current_period: if True, this will generate a usage for the
                            current usage period; if False, this will generate
                            a usage for the previous audit period.
@@ -303,6 +304,33 @@ def notify_usage_exists(notifier, context, instance_ref, current_period=False,
     notify_about_instance_usage(notifier, context, instance_ref, 'exists',
                                 extra_usage_info=extra_info)
 
+    audit_period = instance_notification.AuditPeriodPayload(
+        audit_period_beginning=audit_start,
+        audit_period_ending=audit_end)
+
+    bandwidth = [instance_notification.BandwidthPayload(
+                    network_name=label,
+                    in_bytes=b['bw_in'],
+                    out_bytes=b['bw_out'])
+                 for label, b in bw.items()]
+
+    payload = instance_notification.InstanceExistsPayload(
+        context=context,
+        instance=instance_ref,
+        audit_period=audit_period,
+        bandwidth=bandwidth)
+
+    notification = instance_notification.InstanceExistsNotification(
+        context=context,
+        priority=fields.NotificationPriority.INFO,
+        publisher=notification_base.NotificationPublisher(
+            host=host, source=fields.NotificationSource.COMPUTE),
+        event_type=notification_base.EventType(
+            object='instance',
+            action=fields.NotificationAction.EXISTS),
+        payload=payload)
+    notification.emit(context)
+
 
 def notify_about_instance_usage(notifier, context, instance, event_suffix,
                                 network_info=None, extra_usage_info=None,
diff --git a/nova/notifications/objects/instance.py b/nova/notifications/objects/instance.py
index 64c4f1ad96c6..8b38b710bbd0 100644
--- a/nova/notifications/objects/instance.py
+++ b/nova/notifications/objects/instance.py
@@ -627,3 +627,30 @@ class InstanceActionSnapshotPayload(InstanceActionPayload):
                 instance=instance,
                 fault=fault)
         self.snapshot_image_id = snapshot_image_id
+
+
+@nova_base.NovaObjectRegistry.register_notification
+class InstanceExistsPayload(InstancePayload):
+    # Version 1.0: Initial version
+    VERSION = '1.0'
+    fields = {
+        'audit_period': fields.ObjectField('AuditPeriodPayload'),
+        'bandwidth': fields.ListOfObjectsField('BandwidthPayload'),
+    }
+
+    def __init__(self, context, instance, audit_period, bandwidth):
+        super(InstanceExistsPayload, self).__init__(context=context,
+                                                    instance=instance)
+        self.audit_period = audit_period
+        self.bandwidth = bandwidth
+
+
+@base.notification_sample('instance-exists.json')
+@nova_base.NovaObjectRegistry.register_notification
+class InstanceExistsNotification(base.NotificationBase):
+    # Version 1.0: Initial version
+    VERSION = '1.0'
+
+    fields = {
+        'payload': fields.ObjectField('InstanceExistsPayload')
+    }
diff --git a/nova/tests/functional/notification_sample_tests/test_instance.py b/nova/tests/functional/notification_sample_tests/test_instance.py
index 9e3f17bc7055..2a174fe1b958 100644
--- a/nova/tests/functional/notification_sample_tests/test_instance.py
+++ b/nova/tests/functional/notification_sample_tests/test_instance.py
@@ -376,6 +376,46 @@ class TestInstanceNotificationSample(
                 'uuid': server['id']},
             actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
 
+    def test_instance_exists_usage_audit(self):
+        # TODO(xavvior): Should create a functional test for the
+        # "instance_usage_audit" periodic task. We didn't find a usable
+        # solution for this problem, although we tried to test it in
+        # several ways.
+        pass
+
+    def test_instance_exists(self):
+        server = self._boot_a_server(
+            extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
+
+        self._attach_volume_to_server(server, self.cinder.SWAP_OLD_VOL)
+
+        # Let's generate some bandwidth usage data.
+        # Just call the periodic task directly for simplicity
+        self.compute.manager._poll_bandwidth_usage(context.get_admin_context())
+
+        fake_notifier.reset()
+
+        post = {
+            'rebuild': {
+                'imageRef': 'a2459075-d96c-40d5-893e-577ff92e721c',
+                'metadata': {}
+            }
+        }
+        self.api.post_server_action(server['id'], post)
+        self._wait_for_state_change(self.api, server,
+                                    expected_status='REBUILD')
+        self._wait_for_state_change(self.api, server,
+                                    expected_status='ACTIVE')
+
+        notifications = self._get_notifications('instance.exists')
+        self._verify_notification(
+            'instance-exists',
+            replacements={
+                'reservation_id': server['reservation_id'],
+                'uuid': server['id']
+            },
+            actual=notifications[0])
+
     def _verify_instance_update_steps(self, steps, notifications,
                                       initial=None):
         replacements = {}
@@ -600,19 +640,19 @@ class TestInstanceNotificationSample(
         self._wait_for_state_change(self.api, server,
                                     expected_status='SHELVED')
 
-        self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
+        self.assertEqual(3, len(fake_notifier.VERSIONED_NOTIFICATIONS))
         self._verify_notification(
             'instance-shelve-start',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
         self._verify_notification(
             'instance-shelve-end',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
 
         fake_notifier.reset()
         self.api.post_server_action(server['id'], {'shelveOffload': {}})
@@ -657,19 +697,19 @@ class TestInstanceNotificationSample(
         post = {'unshelve': None}
         self.api.post_server_action(server['id'], post)
         self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
-        self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
+        self.assertEqual(7, len(fake_notifier.VERSIONED_NOTIFICATIONS))
         self._verify_notification(
             'instance-unshelve-start',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
         self._verify_notification(
             'instance-unshelve-end',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[6])
 
     def _test_suspend_resume_server(self, server):
         post = {'suspend': {}}
@@ -780,8 +820,9 @@ class TestInstanceNotificationSample(
         self.api.post_server_action(server['id'], post)
         self._wait_for_state_change(self.api, server, 'VERIFY_RESIZE')
 
-        self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
-
+        self.assertEqual(7, len(fake_notifier.VERSIONED_NOTIFICATIONS))
+        # ignore instance.exists
+        fake_notifier.VERSIONED_NOTIFICATIONS.pop(0)
         # This list needs to be in order.
         expected_notifications = [
             'instance-resize_prep-start',
@@ -799,24 +840,27 @@ class TestInstanceNotificationSample(
                     'uuid': server['id']},
                 actual=fake_notifier.VERSIONED_NOTIFICATIONS[idx])
 
+        fake_notifier.reset()
         # the following is the revert server request
         post = {'revertResize': None}
         self.api.post_server_action(server['id'], post)
         self._wait_for_state_change(self.api, server, 'ACTIVE')
 
-        self.assertEqual(8, len(fake_notifier.VERSIONED_NOTIFICATIONS))
+        self.assertEqual(3, len(fake_notifier.VERSIONED_NOTIFICATIONS))
+        # ignore instance.exists
+        fake_notifier.VERSIONED_NOTIFICATIONS.pop(0)
         self._verify_notification(
             'instance-resize_revert-start',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[6])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
         self._verify_notification(
             'instance-resize_revert-end',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[7])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
 
     @mock.patch('nova.compute.manager.ComputeManager._reschedule',
                 return_value=True)
@@ -853,18 +897,21 @@ class TestInstanceNotificationSample(
         mock_prep_resize.side_effect = _build_resources
         self.api.post_server_action(server['id'], post)
         self._wait_for_notification('instance.resize.error')
-        # 0: instance-resize_prep-start
-        # 1: instance-resize-error
-        # 2: instance-resize_prep-end
+        # 0: instance-exists
+        # 1: instance-resize_prep-start
+        # 2: instance-resize-error
+        # 3: instance-resize_prep-end
         self.assertLessEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS),
                              'Unexpected number of notifications: %s' %
                              fake_notifier.VERSIONED_NOTIFICATIONS)
+        # NOTE(gibi): There is also an instance.exists notification emitted
+        # during the rescheduling.
         self._verify_notification('instance-resize-error',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']
             },
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
 
     @mock.patch('nova.compute.manager.ComputeManager._reschedule')
     @mock.patch('nova.compute.manager.ComputeManager._prep_resize')
@@ -908,14 +955,15 @@ class TestInstanceNotificationSample(
         self.api.post_server_action(server['id'], post)
         self._wait_for_state_change(self.api, server, expected_status='ERROR')
         self._wait_for_notification('compute.exception')
-        # There should be the following four notifications.
-        # 0: instance-resize_prep-start
-        # 1: instance-resize-error
-        # 2: instance-resize_prep-end
-        # 3: compute.exception
+        # There should be the following notifications.
+        # 0: instance-exists
+        # 1: instance-resize_prep-start
+        # 2: instance-resize-error
+        # 3: instance-resize_prep-end
+        # 4: compute.exception
         # (via the wrap_exception decorator on
         # the ComputeManager.prep_resize method.)
-        self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS),
+        self.assertEqual(5, len(fake_notifier.VERSIONED_NOTIFICATIONS),
                          'Unexpected number of notifications: %s' %
                          fake_notifier.VERSIONED_NOTIFICATIONS)
         self._verify_notification('instance-resize-error',
@@ -923,7 +971,7 @@ class TestInstanceNotificationSample(
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']
             },
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
 
     def _test_snapshot_server(self, server):
         post = {'createImage': {'name': 'test-snap'}}
@@ -973,13 +1021,13 @@ class TestInstanceNotificationSample(
                                     expected_status='ACTIVE')
 
         # The compute/manager will detach every volume during rebuild
-        self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
+        self.assertEqual(5, len(fake_notifier.VERSIONED_NOTIFICATIONS))
         self._verify_notification(
             'instance-rebuild-start',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
         self._verify_notification(
             'instance-volume_detach-start',
             replacements={
@@ -988,7 +1036,7 @@ class TestInstanceNotificationSample(
                 'architecture': None,
                 'image_uuid': 'a2459075-d96c-40d5-893e-577ff92e721c',
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
         self._verify_notification(
             'instance-volume_detach-end',
             replacements={
@@ -997,13 +1045,13 @@ class TestInstanceNotificationSample(
                 'architecture': None,
                 'image_uuid': 'a2459075-d96c-40d5-893e-577ff92e721c',
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
         self._verify_notification(
             'instance-rebuild-end',
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
 
     @mock.patch('nova.compute.manager.ComputeManager.'
                 '_do_rebuild_instance_with_claim')
@@ -1301,7 +1349,10 @@ class TestInstanceNotificationSample(
         self.api.post_server_action(server['id'], post)
         self._wait_for_state_change(self.admin_api, server, 'RESCUE')
 
-        self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
+        # 0. instance.rescue.start
+        # 1. instance.exists
+        # 2. instance.rescue.end
+        self.assertEqual(3, len(fake_notifier.VERSIONED_NOTIFICATIONS))
         self._verify_notification(
             'instance-rescue-start',
             replacements={
@@ -1313,7 +1364,7 @@ class TestInstanceNotificationSample(
             replacements={
                 'reservation_id': server['reservation_id'],
                 'uuid': server['id']},
-            actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
+            actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
 
         fake_notifier.reset()
         # unrescue notification asserts
diff --git a/nova/tests/json_ref.py b/nova/tests/json_ref.py
index ce1399e7557f..62682023c3be 100644
--- a/nova/tests/json_ref.py
+++ b/nova/tests/json_ref.py
@@ -40,9 +40,11 @@ def resolve_refs(obj_with_refs, base_path):
         if '$ref' in obj_with_refs.keys():
             ref = obj_with_refs.pop('$ref')
             resolved_ref = _resolve_ref(ref, base_path)
-            # the rest of the ref dict contains overrides for the ref. Apply
-            # those overrides recursively here.
-            _update_dict_recursively(resolved_ref, obj_with_refs)
+            # the rest of the ref dict contains overrides for the ref. Resolve
+            # refs in the overrides, then apply those overrides recursively
+            # here.
+            resolved_overrides = resolve_refs(obj_with_refs, base_path)
+            _update_dict_recursively(resolved_ref, resolved_overrides)
             return resolved_ref
         else:
             for key, value in obj_with_refs.items():
diff --git a/nova/tests/unit/compute/test_compute_mgr.py b/nova/tests/unit/compute/test_compute_mgr.py
index 7e9fff46f4c1..9882bccd8f52 100644
--- a/nova/tests/unit/compute/test_compute_mgr.py
+++ b/nova/tests/unit/compute/test_compute_mgr.py
@@ -1545,7 +1545,13 @@ class ComputeManagerUnitTestCase(test.NoDBTestCase):
         mock_instance_list.assert_called_with(self.context, expected_filters,
                                               use_slave=True)
 
-    def test_instance_usage_audit(self):
+    @mock.patch.object(compute_utils, 'notify_usage_exists')
+    @mock.patch.object(objects.TaskLog, 'end_task')
+    @mock.patch.object(objects.TaskLog, 'begin_task')
+    @mock.patch.object(objects.InstanceList, 'get_active_by_window_joined')
+    @mock.patch.object(objects.TaskLog, 'get')
+    def test_instance_usage_audit(self, mock_get, mock_get_active, mock_begin,
+                                  mock_end, mock_notify):
         instances = [objects.Instance(uuid=uuids.instance)]
 
         def fake_task_log(*a, **k):
@@ -1554,26 +1560,19 @@ class ComputeManagerUnitTestCase(test.NoDBTestCase):
         def fake_get(*a, **k):
             return instances
 
+        mock_get.side_effect = fake_task_log
+        mock_get_active.side_effect = fake_get
+        mock_begin.side_effect = fake_task_log
+        mock_end.side_effect = fake_task_log
         self.flags(instance_usage_audit=True)
-        with test.nested(
-            mock.patch.object(objects.TaskLog, 'get',
-                              side_effect=fake_task_log),
-            mock.patch.object(objects.InstanceList,
-                              'get_active_by_window_joined',
-                              side_effect=fake_get),
-            mock.patch.object(objects.TaskLog, 'begin_task',
-                              side_effect=fake_task_log),
-            mock.patch.object(objects.TaskLog, 'end_task',
-                              side_effect=fake_task_log),
-            mock.patch.object(compute_utils, 'notify_usage_exists')
-        ) as (mock_get, mock_get_active, mock_begin, mock_end, mock_notify):
-            self.compute._instance_usage_audit(self.context)
-            mock_notify.assert_called_once_with(self.compute.notifier,
-                self.context, instances[0], ignore_missing_network_data=False)
-            self.assertTrue(mock_get.called)
-            self.assertTrue(mock_get_active.called)
-            self.assertTrue(mock_begin.called)
-            self.assertTrue(mock_end.called)
+        self.compute._instance_usage_audit(self.context)
+        mock_notify.assert_called_once_with(
+            self.compute.notifier, self.context, instances[0], 'fake-mini',
+            ignore_missing_network_data=False)
+        self.assertTrue(mock_get.called)
+        self.assertTrue(mock_get_active.called)
+        self.assertTrue(mock_begin.called)
+        self.assertTrue(mock_end.called)
 
     @mock.patch.object(objects.InstanceList, 'get_by_host')
     def test_sync_power_states(self, mock_get):
@@ -3381,7 +3380,7 @@ class ComputeManagerUnitTestCase(test.NoDBTestCase):
                                           'verybadpass')
 
         notify_usage_exists.assert_called_once_with(self.compute.notifier,
-            self.context, instance, current_period=True)
+            self.context, instance, 'fake-mini', current_period=True)
         mock_notify.assert_has_calls([
             mock.call(self.context, instance, 'fake-mini', None,
                       phase='start'),
diff --git a/nova/tests/unit/compute/test_compute_utils.py b/nova/tests/unit/compute/test_compute_utils.py
index 9e087a92a42b..c834f9183b8d 100644
--- a/nova/tests/unit/compute/test_compute_utils.py
+++ b/nova/tests/unit/compute/test_compute_utils.py
@@ -425,7 +425,7 @@ class UsageInfoTestCase(test.TestCase):
         instance.system_metadata.update(sys_metadata)
         instance.save()
         compute_utils.notify_usage_exists(
-            rpc.get_notifier('compute'), self.context, instance)
+            rpc.get_notifier('compute'), self.context, instance, 'fake-host')
         self.assertEqual(len(fake_notifier.NOTIFICATIONS), 1)
         msg = fake_notifier.NOTIFICATIONS[0]
         self.assertEqual(msg.priority, 'INFO')
@@ -446,12 +446,39 @@ class UsageInfoTestCase(test.TestCase):
             self.assertIn(attr, payload,
                           "Key %s not in payload" % attr)
         self.assertEqual(payload['image_meta'],
-                {'md_key1': 'val1', 'md_key2': 'val2'})
+                         {'md_key1': 'val1', 'md_key2': 'val2'})
         image_ref_url = "%s/images/%s" % (
             glance.generate_glance_url(self.context), uuids.fake_image_ref)
         self.assertEqual(payload['image_ref_url'], image_ref_url)
         self.compute.terminate_instance(self.context, instance, [])
 
+    def test_notify_usage_exists_emits_versioned(self):
+        # Ensure 'exists' versioned notification has appropriate usage data.
+        instance = create_instance(self.context)
+
+        compute_utils.notify_usage_exists(
+            rpc.get_notifier('compute'), self.context, instance, 'fake-host')
+        self.assertEqual(len(fake_notifier.VERSIONED_NOTIFICATIONS), 1)
+        msg = fake_notifier.VERSIONED_NOTIFICATIONS[0]
+        self.assertEqual(msg['priority'], 'INFO')
+        self.assertEqual(msg['event_type'], 'instance.exists')
+        payload = msg['payload']['nova_object.data']
+        self.assertEqual(payload['tenant_id'], self.project_id)
+        self.assertEqual(payload['user_id'], self.user_id)
+        self.assertEqual(payload['uuid'], instance['uuid'])
+        flavor = payload['flavor']['nova_object.data']
+        self.assertEqual(flavor['name'], 'm1.tiny')
+        flavorid = flavors.get_flavor_by_name('m1.tiny')['flavorid']
+        self.assertEqual(str(flavor['flavorid']), str(flavorid))
+
+        for attr in ('display_name', 'created_at', 'launched_at',
+                     'state', 'bandwidth', 'audit_period'):
+            self.assertIn(attr, payload,
+                          "Key %s not in payload" % attr)
+
+        self.assertEqual(payload['image_uuid'], uuids.fake_image_ref)
+        self.compute.terminate_instance(self.context, instance, [])
+
     def test_notify_usage_exists_deleted_instance(self):
         # Ensure 'exists' notification generates appropriate usage data.
         instance = create_instance(self.context)
@@ -463,7 +490,7 @@ class UsageInfoTestCase(test.TestCase):
         instance.save()
         self.compute.terminate_instance(self.context, instance, [])
         compute_utils.notify_usage_exists(
-            rpc.get_notifier('compute'), self.context, instance)
+            rpc.get_notifier('compute'), self.context, instance, 'fake-host')
         msg = fake_notifier.NOTIFICATIONS[-1]
         self.assertEqual(msg.priority, 'INFO')
         self.assertEqual(msg.event_type, 'compute.instance.exists')
@@ -825,7 +852,7 @@ class UsageInfoTestCase(test.TestCase):
         instance = create_instance(self.context)
         self.compute.terminate_instance(self.context, instance, [])
         compute_utils.notify_usage_exists(
-            rpc.get_notifier('compute'), self.context, instance)
+            rpc.get_notifier('compute'), self.context, instance, 'fake-host')
         msg = fake_notifier.NOTIFICATIONS[-1]
         self.assertEqual(msg.priority, 'INFO')
         self.assertEqual(msg.event_type, 'compute.instance.exists')
diff --git a/nova/tests/unit/notifications/objects/test_notification.py b/nova/tests/unit/notifications/objects/test_notification.py
index 552aa9099ab9..5e358ecb37b4 100644
--- a/nova/tests/unit/notifications/objects/test_notification.py
+++ b/nova/tests/unit/notifications/objects/test_notification.py
@@ -392,6 +392,8 @@ notification_object_data = {
     'InstanceActionSnapshotNotification':
         '1.0-a73147b93b520ff0061865849d3dfa56',
     'InstanceActionSnapshotPayload': '1.7-73f96d93ca47750bb6a45e4ab1d268fd',
+    'InstanceExistsNotification': '1.0-a73147b93b520ff0061865849d3dfa56',
+    'InstanceExistsPayload': '1.0-9b0c1232136e6c850647dfabb63cff07',
     'InstanceStateUpdatePayload': '1.0-07e111c0fa0f6db0f79b0726d593e3da',
     'InstanceUpdateNotification': '1.0-a73147b93b520ff0061865849d3dfa56',
     'InstanceUpdatePayload': '1.7-d48dd2cf8310c8f250dfeb65fd9df97a',
diff --git a/nova/tests/unit/test_json_ref.py b/nova/tests/unit/test_json_ref.py
index 4ae172d8a4e6..dbec62419a13 100644
--- a/nova/tests/unit/test_json_ref.py
+++ b/nova/tests/unit/test_json_ref.py
@@ -133,6 +133,32 @@ class TestJsonRef(test.NoDBTestCase):
                              actual)
         mock_open.assert_called_once_with('some/base/path/another.json',
                                           'r+b')
 
+    @mock.patch('oslo_serialization.jsonutils.load')
+    @mock.patch('nova.tests.json_ref.open')
+    def test_resolve_ref_with_override_having_refs(self, mock_open,
+                                                   mock_json_load):
+        mock_json_load.side_effect = [
+            {'baz': 13,
+             'boo': 42},
+            {'something': 0}]
+
+        actual = json_ref.resolve_refs(
+            {'foo': 1,
+             'bar': {'$ref': 'another.json#',
+                     'boo': {'$ref': 'override_ref.json#'}}},
+            'some/base/path/')
+
+        self.assertDictEqual({'foo': 1,
+                              'bar': {'baz': 13,
+                                      'boo': {'something': 0}}},
+                             actual)
+        self.assertEqual(2, mock_open.call_count)
+        # any_order=True is needed as context manager calls also done on open
+        mock_open.assert_has_calls(
+            [mock.call('some/base/path/another.json', 'r+b'),
+             mock.call('some/base/path/override_ref.json', 'r+b')],
+            any_order=True)
+
     def test_ref_with_json_path_not_supported(self):
         self.assertRaises(