Maintain read line ordering

Change-Id: I6c03549753ca86ed3c0ef1ac1b0a5c0f01a8a31c
Federico Ressi 2021-07-15 14:45:32 +02:00
parent f65daf7422
commit 0f607b95c9
1 changed file with 78 additions and 37 deletions
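
The commit swaps the digger's set-based line store for an insertion-ordered mapping, so matched lines are reported in the order they were read from the log. A minimal standalone sketch of that ordered-set-via-mapping pattern, with illustrative names, not code from the commit:

    # A mapping with None values deduplicates like a set while keeping
    # first-seen order, so recorded log lines stay in read order.
    import collections
    import typing

    found: typing.MutableMapping[str, None] = collections.OrderedDict()

    def record(lines: typing.List[str]) -> typing.List[str]:
        # keep only never-seen lines, preserving their relative order
        new = [line for line in lines if line not in found]
        found.update((line, None) for line in new)
        return new

    assert record(['a', 'b']) == ['a', 'b']
    assert record(['b', 'c']) == ['c']     # 'b' already recorded
    assert list(found) == ['a', 'b', 'c']  # insertion order preserved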

@@ -20,6 +20,8 @@ import os
 import shlex
 import typing
 
+from oslo_log import log
+
 import tobiko
 from tobiko.shell import grep
 from tobiko.shell import find
@@ -27,9 +29,12 @@ from tobiko.shell import sh
 from tobiko.shell import ssh
 
+LOG = log.getLogger(__name__)
+
 
 class LogFileDigger(tobiko.SharedFixture):
 
-    found: typing.Optional[typing.Set[str]] = None
+    found: typing.MutableMapping[str, None]
 
     def __init__(self, filename: str,
                  pattern: typing.Optional[str] = None,
@@ -38,17 +43,23 @@ class LogFileDigger(tobiko.SharedFixture):
         self.filename = filename
         self.pattern = pattern
         self.execute_params = execute_params
+        self.found = collections.OrderedDict()
 
     def setup_fixture(self):
         if self.pattern is not None:
             self.find_lines()
 
     def cleanup_fixture(self):
-        self.found = None
+        self.found.clear()
 
+    @property
+    def found_lines(self) -> typing.List[str]:
+        return list(self.found)
+
     def find_lines(self,
-                   pattern: typing.Optional[str] = None,
-                   new_lines=False) -> typing.FrozenSet[str]:
+                   pattern: str = None,
+                   new_lines=False) \
+            -> typing.List[str]:
         if pattern is None:
             pattern = self.pattern
         if pattern is None:
@@ -56,30 +67,42 @@ class LogFileDigger(tobiko.SharedFixture):
         elif self.pattern is not None:
             raise NotImplementedError(
                 "Combining patterns is not supported")
-        found = self.found
-        if found is None:
-            self.found = found = set()
         try:
-            lines = frozenset(self.grep_lines(pattern))
+            lines = self.grep_lines(pattern,
+                                    new_lines=new_lines)
         except grep.NoMatchingLinesFound:
-            if new_lines:
-                return frozenset()
+            lines = []
         else:
-            lines -= found
-            found.update(lines)
+            if lines:
+                lines = [line
+                         for line in lines
+                         if line not in self.found]
+                self.found.update((line, None)
+                                  for line in lines)
         if new_lines:
             if lines:
                 lines_text = '\n\t'.join(lines)
                 LOG.debug('Found new lines:\n'
                           f"\t{lines_text}")
             return lines
-        return frozenset(found)
+        else:
+            return list(self.found)
 
-    def find_new_lines(self, pattern: typing.Optional[str] = None) -> \
-            typing.FrozenSet[str]:
-        return self.find_lines(pattern=pattern, new_lines=True)
+    def find_new_lines(self,
+                       pattern: str = None) \
+            -> typing.List[str]:
+        return self.find_lines(pattern=pattern,
+                               new_lines=True)
 
-    def grep_lines(self, pattern) -> typing.List[str]:
+    def grep_lines(self,
+                   pattern: str,
+                   new_lines: bool = False) \
+            -> typing.List[str]:
+        # pylint: disable=unused-argument
         log_files = self.list_log_files()
-        return grep.grep_files(pattern=pattern, files=log_files,
+        return grep.grep_files(pattern=pattern,
+                               files=log_files,
                                **self.execute_params)
 
     def list_log_files(self):
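
After this hunk, `find_lines` returns a list rather than a frozen set: with `new_lines=True` it yields only the lines that appeared since the previous call, otherwise every line recorded so far, in read order. A hypothetical usage sketch; the file name and pattern are made up for illustration:

    digger = LogFileDigger(filename='/var/log/myservice.log',
                           pattern='ERROR')
    all_lines = digger.find_lines()      # every match recorded so far, oldest first
    new_lines = digger.find_new_lines()  # only matches added since the call above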
@@ -91,11 +114,14 @@ class LogFileDigger(tobiko.SharedFixture):
 class JournalLogDigger(LogFileDigger):
 
-    def grep_lines(self, pattern) -> typing.List[str]:
+    def grep_lines(self,
+                   pattern: str,
+                   new_lines: bool = False) \
+            -> typing.List[str]:
         try:
             result = sh.execute(["journalctl", '--no-pager',
                                  "--unit", shlex.quote(self.filename),
-                                 "--since", "30 minutes ago",
+                                 # "--since", "30 minutes ago",
                                  '--output', 'short-iso',
                                  '--grep', shlex.quote(pattern)],
                                 **self.execute_params)
@@ -107,6 +133,7 @@ class JournalLogDigger(LogFileDigger):
                     files=[self.filename],
                     login=ssh_client and ssh_client.login or None)
             else:
+                LOG.exception(f"Error executing journalctl: {ex.stderr}")
                 return []
         else:
             lines = [line
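
`JournalLogDigger` shells out to journalctl with the flags shown above (this commit drops the `--since` time filter and logs failures instead of swallowing them). A rough standalone equivalent using only the standard library, assuming a systemd host; the helper name is illustrative:

    import subprocess
    from typing import List

    def journal_grep(unit: str, pattern: str) -> List[str]:
        # Grep the unit's journal and print matches with ISO timestamps.
        # journalctl exits non-zero when nothing matches, so check=False
        # turns that case into an empty result instead of an exception.
        result = subprocess.run(
            ["journalctl", "--no-pager",
             "--unit", unit,
             "--output", "short-iso",
             "--grep", pattern],
            capture_output=True, text=True, check=False)
        return result.stdout.splitlines()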
@@ -122,17 +149,16 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
     def __init__(
             self,
             filename: str,
-            ssh_clients: typing.Optional[
-                typing.Iterable[ssh.SSHClientFixture]] = None,
+            ssh_clients: typing.Iterable[ssh.SSHClientType] = None,
             file_digger_class: typing.Type[LogFileDigger] = LogFileDigger,
-            pattern: typing.Optional[str] = None,
+            pattern: str = None,
             **execute_params):
         super(MultihostLogFileDigger, self).__init__()
         self.file_digger_class = file_digger_class
         self.filename = filename
         self.execute_params = execute_params
         self.pattern = pattern
-        self.ssh_clients: typing.List[ssh.SSHClientFixture] = list()
+        self.ssh_clients: typing.List[ssh.SSHClientType] = []
         if ssh_clients is not None:
             self.ssh_clients.extend(ssh_clients)
@@ -146,8 +172,9 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
     def cleanup_fixture(self):
         self.diggers = None
 
-    def add_host(self, hostname: typing.Optional[str] = None,
-                 ssh_client: typing.Optional[ssh.SSHClientFixture] = None):
+    def add_host(self,
+                 ssh_client: ssh.SSHClientType,
+                 hostname: str = None):
         if self.diggers is None:
             self.diggers = collections.OrderedDict()
         if hostname is None:
@@ -161,11 +188,22 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
                                              **self.execute_params)
         return digger
 
+    @property
+    def found_lines(self) \
+            -> typing.List[typing.Tuple[str, str]]:
+        # ensure diggers are ready before looking for lines
+        tobiko.setup_fixture(self)
+        lines: typing.List[typing.Tuple[str, str]] = []
+        if self.diggers is not None:
+            for hostname, digger in self.diggers.items():
+                for line in digger.found_lines:
+                    lines.append((hostname, line))
+        return lines
+
     def find_lines(self,
-                   pattern: typing.Optional[str] = None,
+                   pattern: str = None,
                    new_lines: bool = False) \
             -> typing.List[typing.Tuple[str, str]]:
         # ensure diggers are ready before looking for lines
         tobiko.setup_fixture(self)
         lines: typing.List[typing.Tuple[str, str]] = []
@@ -177,8 +215,8 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
         return lines
 
     def find_new_lines(self,
-                       pattern: typing.Optional[str] = None,
-                       retry_count: typing.Optional[int] = None,
+                       pattern: str = None,
+                       retry_count: int = None,
                        retry_timeout: tobiko.Seconds = 60.,
                        retry_interval: tobiko.Seconds = None) \
             -> typing.List[typing.Tuple[str, str]]:
@@ -187,8 +225,11 @@ class MultihostLogFileDigger(tobiko.SharedFixture):
                                 interval=retry_interval,
                                 default_interval=1.,
                                 default_timeout=60.):
-            new_lines = self.find_lines(pattern=pattern, new_lines=True)
+            new_lines = self.find_lines(pattern=pattern,
+                                        new_lines=True)
             if new_lines:
-                return new_lines
+                break
+        else:
+            raise RuntimeError("Internal bug: retry loop break itself.")
+        return new_lines
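
The retry loop now breaks instead of returning directly and relies on Python's `for`/`else`: the `else` suite runs only when the loop finishes without `break`, which here can only mean a bug in the retry helper, hence the defensive `raise`. A standalone sketch of the same control flow; the helper and check are made up:

    def poll_until(check, attempts: int = 5):
        # 'else' on a for loop runs only if no 'break' occurred, so an
        # exhausted loop is reported instead of silently falling through.
        for attempt in range(attempts):
            result = check(attempt)
            if result:
                break
        else:
            raise RuntimeError("no result after all attempts")
        return result

    assert poll_until(lambda i: i >= 2 and ['found line']) == ['found line']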