Add option to file publisher to write json
Change-Id: I21f6f8fca8de1d6b8784e140382d1a1a99398279
Closes-Bug: #1783985
parent a74978619e
commit 063af43744
@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import json
 import logging
 import logging.handlers
 
@@ -41,12 +42,13 @@ class FilePublisher(publisher.ConfigPublisherBase):
             meters:
                 - "*"
             publishers:
-                - file:///var/test?max_bytes=10000000&backup_count=5
+                - file:///var/test?max_bytes=10000000&backup_count=5&json
 
     File path is required for this publisher to work properly. If max_bytes
     or backup_count is missing, FileHandler will be used to save the metering
     data. If max_bytes and backup_count are present, RotatingFileHandler will
-    be used to save the metering data.
+    be used to save the metering data. The json argument is used to explicitly
+    ask ceilometer to write JSON into the file.
     """
 
     def __init__(self, conf, parsed_url):
@@ -61,9 +63,13 @@ class FilePublisher(publisher.ConfigPublisherBase):
         rfh = None
         max_bytes = 0
         backup_count = 0
+        self.output_json = None
         # Handling other configuration options in the query string
         if parsed_url.query:
-            params = urlparse.parse_qs(parsed_url.query)
+            params = urlparse.parse_qs(parsed_url.query,
+                                       keep_blank_values=True)
+            if "json" in params:
+                self.output_json = True
             if params.get('max_bytes') and params.get('backup_count'):
                 try:
                     max_bytes = int(params.get('max_bytes')[0])
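For context, a minimal standard-library sketch (illustrative, not part of the change) of why keep_blank_values=True is needed here: without it, parse_qs silently drops the valueless json flag from the query string.

    from urllib import parse as urlparse

    query = "max_bytes=10000000&backup_count=5&json"

    # Default behaviour: the bare "json" key has no value and is discarded.
    urlparse.parse_qs(query)
    # -> {'max_bytes': ['10000000'], 'backup_count': ['5']}

    # keep_blank_values=True keeps it as an empty-string value, so the
    # '"json" in params' check above can detect the flag.
    urlparse.parse_qs(query, keep_blank_values=True)
    # -> {'max_bytes': ['10000000'], 'backup_count': ['5'], 'json': ['']}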
@@ -90,7 +96,10 @@ class FilePublisher(publisher.ConfigPublisherBase):
         """
         if self.publisher_logger:
             for sample in samples:
-                self.publisher_logger.info(sample.as_dict())
+                if self.output_json:
+                    self.publisher_logger.info(json.dumps(sample.as_dict()))
+                else:
+                    self.publisher_logger.info(sample.as_dict())
 
     def publish_events(self, events):
         """Send an event message for publishing
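A small illustrative snippet (the dict below is made up, not a real sample.as_dict() payload) of why the json.dumps branch matters: logging the dict directly writes Python's repr, which uses single quotes and is not valid JSON, while json.dumps emits one parseable JSON document per line.

    import json

    sample = {'id': 'abc-123', 'timestamp': '2018-07-27T12:00:00'}  # hypothetical payload

    str(sample)         # "{'id': 'abc-123', 'timestamp': '2018-07-27T12:00:00'}"  (repr, not JSON)
    json.dumps(sample)  # '{"id": "abc-123", "timestamp": "2018-07-27T12:00:00"}'  (valid JSON)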
@@ -99,4 +108,7 @@ class FilePublisher(publisher.ConfigPublisherBase):
         """
         if self.publisher_logger:
             for event in events:
-                self.publisher_logger.info(event.as_dict())
+                if self.output_json:
+                    self.publisher_logger.info(json.dumps(event.as_dict()))
+                else:
+                    self.publisher_logger.info(event.as_dict())
@@ -16,6 +16,7 @@
 """
 
 import datetime
+import json
 import logging.handlers
 import os
 import tempfile
@@ -120,3 +121,32 @@ class TestFilePublisher(base.BaseTestCase):
         publisher.publish_samples(self.test_data)
 
         self.assertIsNone(publisher.publisher_logger)
+
+    def test_file_publisher_json(self):
+        tempdir = tempfile.mkdtemp()
+        name = '%s/log_file_json' % tempdir
+        parsed_url = netutils.urlsplit('file://%s?json' % name)
+
+        publisher = file.FilePublisher(self.CONF, parsed_url)
+        publisher.publish_samples(self.test_data)
+
+        handler = publisher.publisher_logger.handlers[0]
+        self.assertIsInstance(handler,
+                              logging.handlers.RotatingFileHandler)
+        self.assertEqual([0, name, 0], [handler.maxBytes,
+                                        handler.baseFilename,
+                                        handler.backupCount])
+        self.assertTrue(os.path.exists(name))
+        with open(name, 'r') as f:
+            content = f.readlines()
+
+        self.assertEqual(len(self.test_data), len(content))
+        for index, line in enumerate(content):
+            try:
+                json_data = json.loads(line)
+            except ValueError:
+                self.fail("File written is not valid json")
+            self.assertEqual(self.test_data[index].id,
+                             json_data['id'])
+            self.assertEqual(self.test_data[index].timestamp,
+                             json_data['timestamp'])
@@ -247,6 +247,10 @@ The following options are available for the ``file`` publisher:
   the newest data is always the one that is specified without any
   extensions.
 
+``json``
+  If this option is present, ceilometer will write the metering data to the
+  file in JSON format.
+
 http
 ````
 
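As a hedged usage sketch (the path comes from the example URL above; only the id and timestamp keys are confirmed by the test in this change), a consumer could then read the file back as newline-delimited JSON:

    import json

    # One JSON document is written per published sample or event.
    with open('/var/test') as f:
        for line in f:
            record = json.loads(line)
            print(record['id'], record['timestamp'])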
@@ -0,0 +1,5 @@
+---
+features:
+  - >
+    Add new json output option for the existing file publisher.