Remove unnecessary tests
Remove unnecessary tests for log_publisher related to Kafka key building.

Change-Id: I9bdf1e1fc868090fea6bbfd6daca7278aafb0324
parent 25c87eb872
commit e9ca59c1c1
@@ -14,7 +14,6 @@
 # under the License.
 
 import datetime
-import random
 import ujson
 import unittest
 
@@ -48,30 +47,6 @@ class TestSendMessage(testing.TestBase):
         not_dict_value = 123
         instance.send_message(not_dict_value)
 
-    @mock.patch('monasca_log_api.reference.common.log_publisher.producer'
-                '.KafkaProducer')
-    def test_should_not_send_message_missing_keys(self, _):
-        # checks every combination of missing keys
-        # test does not rely on those keys having a value or not,
-        # it simply assumes that values are set but important
-        # message (i.e. envelope) properties are missing entirely
-        # that's why there are two loops instead of three
-
-        instance = log_publisher.LogPublisher()
-        keys = ['log', 'creation_time', 'meta']
-
-        for key_1 in keys:
-            diff = keys[:]
-            diff.remove(key_1)
-            for key_2 in diff:
-                message = {
-                    key_1: random.randint(10, 20),
-                    key_2: random.randint(30, 50)
-                }
-                self.assertRaises(log_publisher.InvalidMessageException,
-                                  instance.send_message,
-                                  message)
-
     @mock.patch('monasca_log_api.reference.common.log_publisher.producer'
                 '.KafkaProducer')
     def test_should_not_send_message_missing_values(self, _):
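The removed missing-keys test enumerates every two-of-three subset of the required envelope keys ('log', 'creation_time', 'meta') with a pair of nested loops. Below is a minimal, self-contained sketch of the same enumeration (illustrative only, not code from this repository; itertools.combinations stands in for the hand-rolled loops):

import itertools
import random

REQUIRED_KEYS = ['log', 'creation_time', 'meta']


def incomplete_messages():
    # Each 2-element combination leaves exactly one required key out,
    # matching what the removed double loop produced (pair order aside).
    for present in itertools.combinations(REQUIRED_KEYS, 2):
        yield {key: random.randint(10, 50) for key in present}


# The removed test asserted that every such envelope makes
# LogPublisher.send_message() raise InvalidMessageException.
for message in incomplete_messages():
    print(sorted(message))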
@@ -173,117 +148,3 @@ class TestSendMessage(testing.TestBase):
             instance._kafka_publisher.publish.assert_any_call(
                 topic,
                 [json_msg])
-
-    @mock.patch(
-        'monasca_log_api.reference.common.log_publisher.producer'
-        '.KafkaProducer')
-    def test_should_set_multiple_msgs_to_multiple_topics_diff_keys(self,
-                                                                    publisher):
-        topics = ['logs', 'analyzer', 'tester']
-        self.conf.config(topics=topics, group='log_publisher')
-        self.conf.config(max_log_size=5000, group='service')
-
-        instance = log_publisher.LogPublisher()
-        instance._kafka_publisher = publisher
-
-        num_of_msgs = 3
-        msgs_data = []
-        creation_time = ((datetime.datetime.utcnow() - EPOCH_START)
-                         .total_seconds())
-        for it in xrange(num_of_msgs):
-            msg = {
-                'log': {
-                    'message': it,
-                    'dimensions': {
-                        'hostname': 'localhost',
-                        'component': 'some_component.%d' % it,
-                        'path': '/var/log/test/%s/me.log' % it
-                    }
-                },
-                'creation_time': creation_time,
-                'meta': {
-                    'tenantId': 1
-                }
-            }
-            msgs_data.append(msg)
-
-        instance.send_message(msgs_data)
-
-        self.assertEqual(len(topics),
-                         instance._kafka_publisher.publish.call_count)
-
-    @mock.patch(
-        'monasca_log_api.reference.common.log_publisher.producer'
-        '.KafkaProducer')
-    def test_should_set_multiple_msgs_to_multiple_topics_same_keys(self,
-                                                                    publisher):
-        topics = ['logs', 'analyzer', 'tester']
-        self.conf.config(topics=topics, group='log_publisher')
-        self.conf.config(max_log_size=5000, group='service')
-
-        instance = log_publisher.LogPublisher()
-        instance._kafka_publisher = publisher
-
-        num_of_msgs = 3
-        msgs_data = []
-        creation_time = ((datetime.datetime.utcnow() - EPOCH_START)
-                         .total_seconds())
-        for it in xrange(num_of_msgs):
-            msg = {
-                'log': {
-                    'message': it,
-                    'application_type': 'some_app_type',
-                    'dimensions': {
-                        'hostname': 'localhost',
-                        'path': '/var/log/test/same_key/me.log'
-                    }
-                },
-                'creation_time': creation_time,
-                'meta': {
-                    'tenantId': 1
-                }
-            }
-            msgs_data.append(msg)
-
-        instance.send_message(msgs_data)
-
-        self.assertEqual(len(topics),
-                         instance._kafka_publisher.publish.call_count)
-
-    @mock.patch(
-        'monasca_log_api.reference.common.log_publisher.producer'
-        '.KafkaProducer')
-    def test_should_set_multiple_msgs_to_single_topic_diff_keys(self,
-                                                                 publisher):
-        topics = ['logs']
-        self.conf.config(topics=topics, group='log_publisher')
-        self.conf.config(max_log_size=5000, group='service')
-
-        instance = log_publisher.LogPublisher()
-        instance._kafka_publisher = publisher
-
-        num_of_msgs = 3
-        msgs_data = []
-        creation_time = ((datetime.datetime.utcnow() - EPOCH_START)
-                         .total_seconds())
-        for it in xrange(num_of_msgs):
-            msg = {
-                'log': {
-                    'message': it,
-                    'dimensions': {
-                        'hostname': 'localhost',
-                        'component': 'some_app_type.%d' % it,
-                        'path': '/var/log/test/%s/me.log' % it
-                    }
-                },
-                'creation_time': creation_time,
-                'meta': {
-                    'tenantId': 1
-                }
-            }
-            msgs_data.append(msg)
-
-        instance.send_message(msgs_data)
-
-        self.assertEqual(len(topics),
-                         instance._kafka_publisher.publish.call_count)
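The three removed fan-out tests all build the same envelope shape ('log' with dimensions, 'creation_time' as seconds since the epoch, 'meta' with a tenant id) and assert that a single send_message() call with a batch of messages results in exactly one publish() call per configured topic. The sketch below is a stand-alone illustration of that envelope and that call-count check; it assumes EPOCH_START means the Unix epoch and simulates the publisher with a plain mock instead of using LogPublisher itself:

import datetime
try:
    from unittest import mock          # Python 3
except ImportError:
    import mock                        # Python 2, as in the original tests

# Assumption: EPOCH_START in the test module is the Unix epoch.
EPOCH_START = datetime.datetime(1970, 1, 1)


def build_envelope(message_id):
    """Build an envelope shaped like the ones the removed tests sent."""
    creation_time = ((datetime.datetime.utcnow() - EPOCH_START)
                     .total_seconds())
    return {
        'log': {
            'message': message_id,
            'dimensions': {
                'hostname': 'localhost',
                'path': '/var/log/test/%s/me.log' % message_id
            }
        },
        'creation_time': creation_time,
        'meta': {'tenantId': 1}
    }


topics = ['logs', 'analyzer', 'tester']
publisher = mock.Mock()
batch = [build_envelope(it) for it in range(3)]

# Simulate the behaviour the removed tests expected of LogPublisher:
# the batch is published once to every configured topic (assumption).
for topic in topics:
    publisher.publish(topic, batch)

# This is the assertion the removed tests made against the mocked producer.
assert publisher.publish.call_count == len(topics)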