Support for URLs instead of local log files, simple caching.

This commit is contained in:
Miguel Angel Ajo 2015-11-06 14:10:37 +01:00
parent 2a27eaba64
commit 183efe3b13
3 changed files with 56 additions and 15 deletions

View File

@ -48,4 +48,5 @@ The previous example would produce something like this::
Exit code: 0
Stdout: 'ha-2cdba01d-e4\nha-44dca3a9-44\nha-499d3db7-97\nha-55a19f5e-ef\nha-b2d04f15-f2\nha-b5b271a1-d8\nha-fa58d644-81\nint-br-enp7s0\nint-br-ex\nqr-34b826df-97\nqr-5d5ea109-48\nqr-6adcffbf-09\nqr-743ccaa6-7e\nqr-79b33879-32\nqr-c12e6e06-ff\nqr-dc662767-61\n'
References to HTTP URLs instead of local file paths are also supported. Files
will be cached locally to avoid re-downloading them on subsequent runs.

View File

@ -1,14 +1,48 @@
from __future__ import print_function
from datetime import datetime
import hashlib
import os
import sys
import tempfile
import urllib2
EXTRALINES_PADDING = " " * 40
CACHE_DIR = "%s/oslogmerger-cache/" % tempfile.gettempdir()
class OpenStackLog:
def __init__(self, filename):
self._file = open(filename, 'r')
self._open(filename)
def _open(self, filename):
self._filename = filename
if filename.startswith("http://"):
filename = self._cached_download(filename)
self._file = open(filename, 'r')
def _url_cache_path(self, url):
md5 = hashlib.md5()
md5.update(url)
return CACHE_DIR + md5.hexdigest() + ".log"
def _ensure_cache_dir(self):
if not os.path.exists(CACHE_DIR):
os.makedirs(CACHE_DIR)
def _cached_download(self, url):
self._ensure_cache_dir()
path = self._url_cache_path(url)
if os.path.isfile(path):
print("CACHED: %s at %s" % (url, path), file=sys.stderr)
return path
print("DOWNLOADING: %s to %s" % (url, path), file=sys.stderr)
http_in = urllib2.urlopen(url)
file_out = open(path, 'w')
file_out.write(http_in.read())
file_out.close()
http_in.close()
return path
def _extract_with_date(self, line):
try:
@ -50,7 +84,7 @@ class OpenStackLog:
def help():
print """oslogmerger tool
print ("""oslogmerger tool
usage instructions:
oslogmerger /path/log_file1[:ALIAS] /path/log_file2[:ALIAS2] ..
@ -65,12 +99,11 @@ alias. Use the aliases if you want shorter line lengths.
Y-m-d H:M:S.mmm PID LOG-LEVEL ............
Y-m-d H:M:S.mmm PID LOG-LEVEL ............
[ extra line info ..... ]
"""
""")
def process_logs(files):
if len(files)==0:
if len(files) == 0:
help()
return 1
all_entries = []
@ -80,26 +113,33 @@ def process_logs(files):
# check if filename has an alias for log output, in the form of
# /path/filename:alias
filename_and_alias = filename.split(':')
if len(filename_and_alias) > 1:
filename_alias[filename_and_alias[0]] = (
"[%s]" % filename_and_alias[1])
filename = filename_and_alias[0]
alias = filename_and_alias[1:]
if filename == 'http' and alias and alias[0].startswith('//'):
filename = filename_and_alias[0] + ':' + filename_and_alias[1]
alias = filename_and_alias[2:]
if alias:
filename_alias[filename] = "[%s]" % alias[0]
else:
filename_alias[filename] = filename
# read the log
oslog = OpenStackLog(filename_and_alias[0])
oslog = OpenStackLog(filename)
for entry in oslog.log_entries():
all_entries.append(entry)
sorted_entries = sorted(all_entries, key=lambda log_entry: log_entry[0])
for entry in sorted_entries:
(date_object, filename, pid, level, rest) = entry
print ' '.join(
[date_object.strftime("%Y-%m-%d %H:%M:%S.%f"),
filename_alias[filename], pid,
level, rest]).rstrip('\n')
print (' '.join(
[date_object.strftime("%Y-%m-%d %H:%M:%S.%f"),
filename_alias[filename], pid,
level, rest]).rstrip('\n'))
return 0
def main():
sys.exit(process_logs(sys.argv[1:]))

View File

@ -11,7 +11,7 @@ with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
setup(
name='oslogmerger',
version='1.0.2',
version='1.0.3',
description='Openstack Log merge tool',
long_description=long_description,