Maintain read line ordering

Change-Id: I6c03549753ca86ed3c0ef1ac1b0a5c0f01a8a31c
This commit is contained in:
Federico Ressi 2021-07-15 14:45:32 +02:00
parent f65daf7422
commit 0f607b95c9
1 changed files with 78 additions and 37 deletions

View File

@ -20,6 +20,8 @@ import os
import shlex import shlex
import typing import typing
from oslo_log import log
import tobiko import tobiko
from tobiko.shell import grep from tobiko.shell import grep
from tobiko.shell import find from tobiko.shell import find
@ -27,9 +29,12 @@ from tobiko.shell import sh
from tobiko.shell import ssh from tobiko.shell import ssh
LOG = log.getLogger(__name__)
class LogFileDigger(tobiko.SharedFixture): class LogFileDigger(tobiko.SharedFixture):
found: typing.Optional[typing.Set[str]] = None found: typing.MutableMapping[str, None]
def __init__(self, filename: str, def __init__(self, filename: str,
pattern: typing.Optional[str] = None, pattern: typing.Optional[str] = None,
@ -38,17 +43,23 @@ class LogFileDigger(tobiko.SharedFixture):
self.filename = filename self.filename = filename
self.pattern = pattern self.pattern = pattern
self.execute_params = execute_params self.execute_params = execute_params
self.found = collections.OrderedDict()
def setup_fixture(self): def setup_fixture(self):
if self.pattern is not None: if self.pattern is not None:
self.find_lines() self.find_lines()
def cleanup_fixture(self): def cleanup_fixture(self):
self.found = None self.found.clear()
@property
def found_lines(self) -> typing.List[str]:
return list(self.found)
def find_lines(self, def find_lines(self,
pattern: typing.Optional[str] = None, pattern: str = None,
new_lines=False) -> typing.FrozenSet[str]: new_lines=False) \
-> typing.List[str]:
if pattern is None: if pattern is None:
pattern = self.pattern pattern = self.pattern
if pattern is None: if pattern is None:
@ -56,30 +67,42 @@ class LogFileDigger(tobiko.SharedFixture):
elif self.pattern is not None: elif self.pattern is not None:
raise NotImplementedError( raise NotImplementedError(
"Combining patterns is not supported") "Combining patterns is not supported")
found = self.found
if found is None:
self.found = found = set()
try: try:
lines = frozenset(self.grep_lines(pattern)) lines = self.grep_lines(pattern,
new_lines=new_lines)
except grep.NoMatchingLinesFound: except grep.NoMatchingLinesFound:
if new_lines: lines = []
return frozenset()
else: else:
lines -= found if lines:
found.update(lines) lines = [line
for line in lines
if line not in self.found]
self.found.update((line, None)
for line in lines)
if new_lines: if new_lines:
if lines:
lines_text = '\n\t'.join(lines)
LOG.debug('Found new lines:\n'
f"\t{lines_text}")
return lines return lines
return frozenset(found) else:
return list(self.found)
def find_new_lines(self, pattern: typing.Optional[str] = None) -> \ def find_new_lines(self,
typing.FrozenSet[str]: pattern: str = None) \
return self.find_lines(pattern=pattern, new_lines=True) -> typing.List[str]:
return self.find_lines(pattern=pattern,
new_lines=True)
def grep_lines(self, pattern) -> typing.List[str]: def grep_lines(self,
pattern: str,
new_lines: bool = False) \
-> typing.List[str]:
# pylint: disable=unused-argument
log_files = self.list_log_files() log_files = self.list_log_files()
return grep.grep_files(pattern=pattern, files=log_files, return grep.grep_files(pattern=pattern,
files=log_files,
**self.execute_params) **self.execute_params)
def list_log_files(self): def list_log_files(self):
@ -91,11 +114,14 @@ class LogFileDigger(tobiko.SharedFixture):
class JournalLogDigger(LogFileDigger): class JournalLogDigger(LogFileDigger):
def grep_lines(self, pattern) -> typing.List[str]: def grep_lines(self,
pattern: str,
new_lines: bool = False) \
-> typing.List[str]:
try: try:
result = sh.execute(["journalctl", '--no-pager', result = sh.execute(["journalctl", '--no-pager',
"--unit", shlex.quote(self.filename), "--unit", shlex.quote(self.filename),
"--since", "30 minutes ago", # "--since", "30 minutes ago",
'--output', 'short-iso', '--output', 'short-iso',
'--grep', shlex.quote(pattern)], '--grep', shlex.quote(pattern)],
**self.execute_params) **self.execute_params)
@ -107,6 +133,7 @@ class JournalLogDigger(LogFileDigger):
files=[self.filename], files=[self.filename],
login=ssh_client and ssh_client.login or None) login=ssh_client and ssh_client.login or None)
else: else:
LOG.exception(f"Error executing journalctl: {ex.stderr}")
return [] return []
else: else:
lines = [line lines = [line
@ -122,17 +149,16 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
def __init__( def __init__(
self, self,
filename: str, filename: str,
ssh_clients: typing.Optional[ ssh_clients: typing.Iterable[ssh.SSHClientType] = None,
typing.Iterable[ssh.SSHClientFixture]] = None,
file_digger_class: typing.Type[LogFileDigger] = LogFileDigger, file_digger_class: typing.Type[LogFileDigger] = LogFileDigger,
pattern: typing.Optional[str] = None, pattern: str = None,
**execute_params): **execute_params):
super(MultihostLogFileDigger, self).__init__() super(MultihostLogFileDigger, self).__init__()
self.file_digger_class = file_digger_class self.file_digger_class = file_digger_class
self.filename = filename self.filename = filename
self.execute_params = execute_params self.execute_params = execute_params
self.pattern = pattern self.pattern = pattern
self.ssh_clients: typing.List[ssh.SSHClientFixture] = list() self.ssh_clients: typing.List[ssh.SSHClientType] = []
if ssh_clients is not None: if ssh_clients is not None:
self.ssh_clients.extend(ssh_clients) self.ssh_clients.extend(ssh_clients)
@ -146,8 +172,9 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
def cleanup_fixture(self): def cleanup_fixture(self):
self.diggers = None self.diggers = None
def add_host(self, hostname: typing.Optional[str] = None, def add_host(self,
ssh_client: typing.Optional[ssh.SSHClientFixture] = None): ssh_client: ssh.SSHClientType,
hostname: str = None):
if self.diggers is None: if self.diggers is None:
self.diggers = collections.OrderedDict() self.diggers = collections.OrderedDict()
if hostname is None: if hostname is None:
@ -161,11 +188,22 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
**self.execute_params) **self.execute_params)
return digger return digger
@property
def found_lines(self) \
-> typing.List[typing.Tuple[str, str]]:
# ensure diggers are ready before looking for lines
tobiko.setup_fixture(self)
lines: typing.List[typing.Tuple[str, str]] = []
if self.diggers is not None:
for hostname, digger in self.diggers.items():
for line in digger.found_lines:
lines.append((hostname, line))
return lines
def find_lines(self, def find_lines(self,
pattern: typing.Optional[str] = None, pattern: str = None,
new_lines: bool = False) \ new_lines: bool = False) \
-> typing.List[typing.Tuple[str, str]]: -> typing.List[typing.Tuple[str, str]]:
# ensure diggers are ready before looking for lines # ensure diggers are ready before looking for lines
tobiko.setup_fixture(self) tobiko.setup_fixture(self)
lines: typing.List[typing.Tuple[str, str]] = [] lines: typing.List[typing.Tuple[str, str]] = []
@ -177,8 +215,8 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
return lines return lines
def find_new_lines(self, def find_new_lines(self,
pattern: typing.Optional[str] = None, pattern: str = None,
retry_count: typing.Optional[int] = None, retry_count: int = None,
retry_timeout: tobiko.Seconds = 60., retry_timeout: tobiko.Seconds = 60.,
retry_interval: tobiko.Seconds = None) \ retry_interval: tobiko.Seconds = None) \
-> typing.List[typing.Tuple[str, str]]: -> typing.List[typing.Tuple[str, str]]:
@ -187,8 +225,11 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
interval=retry_interval, interval=retry_interval,
default_interval=1., default_interval=1.,
default_timeout=60.): default_timeout=60.):
new_lines = self.find_lines(pattern=pattern, new_lines=True) new_lines = self.find_lines(pattern=pattern,
new_lines=True)
if new_lines: if new_lines:
return new_lines break
else:
raise RuntimeError("Internal bug: retry loop break itself.") raise RuntimeError("Internal bug: retry loop break itself.")
return new_lines