Merge "Add JournalLogDigger class to parse OpenStack services logs"

Zuul 2020-11-27 17:25:26 +00:00 committed by Gerrit Code Review
commit e68bc18773
7 changed files with 94 additions and 7 deletions

View File

@@ -22,6 +22,7 @@ from tobiko.openstack.neutron import _port
from tobiko.openstack.neutron import _network
SERVER = 'neutron-server'
DHCP_AGENT = _agent.DHCP_AGENT
L3_AGENT = _agent.L3_AGENT
METADATA_AGENT = _agent.METADATA_AGENT

View File

@@ -25,6 +25,7 @@ from six.moves.urllib import parse
import tobiko
from tobiko import docker
from tobiko import podman
from tobiko.shell import files
from tobiko.shell import ip
from tobiko.shell import sh
from tobiko.shell import ssh
@@ -192,6 +193,18 @@ class OpenStackTopology(tobiko.SharedFixture):
_connections = tobiko.required_setup_fixture(
_connection.SSHConnectionManager)
    # In a Devstack-based env, logs can be accessed with journalctl
    file_digger_class: typing.Type = files.JournalLogDigger
    # This dict maps each OpenStack service to its log file name (and
    # systemd unit, if needed).
    # In the Devstack topology the file name is in fact the name of the
    # systemd unit, as logs are stored in the journal.
log_names_mappings = {
neutron.SERVER: 'devstack@q-svc',
}
def __init__(self):
super(OpenStackTopology, self).__init__()
self._names: typing.Dict[str, OpenStackTopologyNode] = (
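
The two attributes above let a test resolve, per topology, both the digger implementation and the log "file" name, which on Devstack is really a systemd unit. A minimal sketch of that lookup (the get_log_name helper is hypothetical, not part of this change):

    from tobiko.openstack import neutron
    from tobiko.openstack import topology

    def get_log_name(service_name: str) -> str:
        # Resolve the per-topology log file (or systemd unit) of a service.
        os_topology = topology.get_openstack_topology()
        return os_topology.log_names_mappings[service_name]

    # On Devstack this returns 'devstack@q-svc'; on TripleO, a log file glob.
    print(get_log_name(neutron.SERVER))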

View File

@@ -19,4 +19,5 @@ from tobiko.shell.files import _logs
LogFileDigger = _logs.LogFileDigger
JournalLogDigger = _logs.JournalLogDigger
MultihostLogFileDigger = _logs.MultihostLogFileDigger

View File

@@ -57,10 +57,35 @@ class LogFileDigger(object):
**self.execute_params)
class JournalLogDigger(LogFileDigger):

    def find_lines(self, pattern, new_lines=False):
        # The journald unit is dumped with journalctl instead of reading a
        # log file from disk; self.filename holds the systemd unit name.
        dump_log_cmd = ["journalctl", "--unit", self.filename,
                        "--since", "5 minutes ago"]
        try:
            lines = frozenset(
                grep.grep_lines(pattern=pattern,
                                command=dump_log_cmd,
                                **self.execute_params))
        except grep.NoMatchingLinesFound:
            if new_lines:
                return frozenset()
        else:
            lines -= self.found
            self.found.update(lines)
            if new_lines:
                return lines
        return frozenset(self.found)
class MultihostLogFileDigger(object):

-    def __init__(self, filename, ssh_clients=None, **execute_params):
+    def __init__(self, filename, ssh_clients=None,
+                 file_digger_class=LogFileDigger,
+                 **execute_params):
        self.diggers = collections.OrderedDict()
+        self.file_digger_class = file_digger_class
        self.filename = filename
        self.execute_params = execute_params
        if ssh_clients:
@@ -70,9 +95,10 @@ class MultihostLogFileDigger(object):
def add_host(self, hostname=None, ssh_client=None):
hostname = hostname or sh.get_hostname(ssh_client=ssh_client)
if hostname not in self.diggers:
-            self.diggers[hostname] = LogFileDigger(filename=self.filename,
-                                                   ssh_client=ssh_client,
-                                                   **self.execute_params)
+            self.diggers[hostname] = self.file_digger_class(
+                filename=self.filename,
+                ssh_client=ssh_client,
+                **self.execute_params)
def find_lines(self, pattern, new_lines=False):
lines = []
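
With the new file_digger_class parameter, the same multi-host digger can aggregate journald output instead of plain log files. A hedged usage sketch (the hostname and unit name are illustrative; find_lines on the multi-host digger is assumed to yield (hostname, line) pairs, one per matching line on each registered host):

    from tobiko.shell import files

    digger = files.MultihostLogFileDigger(
        filename='devstack@q-svc',
        file_digger_class=files.JournalLogDigger,
        sudo=True)
    digger.add_host(hostname='controller-0')

    # Each JournalLogDigger greps the output of
    # 'journalctl --unit devstack@q-svc --since "5 minutes ago"'.
    for hostname, line in digger.find_lines(pattern='ERROR'):
        print(hostname, line)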

View File

@@ -15,6 +15,7 @@
# under the License.
from __future__ import absolute_import
import re
import typing # noqa
import tobiko
@@ -56,3 +57,30 @@ def grep_files(pattern: str,
raise NoMatchingLinesFound(pattern=pattern,
files=files,
login=ssh_client and ssh_client.login or None)
def grep_lines(pattern: str,
command: sh.ShellCommandType,
ssh_client: ssh.SSHClientFixture = None,
**execute_params) -> typing.List[str]:
if not pattern:
raise ValueError("Pattern string can't be empty")
command_line = sh.shell_command(command)
try:
result = sh.execute(command_line,
ssh_client=ssh_client,
**execute_params)
except sh.ShellCommandFailed as ex:
if ex.exit_status > 1:
# Some unknown problem occurred
raise
    else:
        # Match lines of the command output against the compiled pattern
        # instead of spawning a separate grep process.
        r = re.compile(pattern)
        output_lines: typing.List[str] = [
            line for line in result.stdout.splitlines()
            if r.search(line)]
        if output_lines:
            return output_lines
raise NoMatchingLinesFound(pattern=pattern,
files=command,
login=ssh_client and ssh_client.login or None)
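
grep_lines complements grep_files: it runs an arbitrary command and filters its stdout with a Python regular expression, raising NoMatchingLinesFound when nothing matches and tolerating an exit status of 1 from the command itself. An illustrative call, reusing the journalctl invocation from JournalLogDigger:

    from tobiko.shell import grep

    try:
        lines = grep.grep_lines(
            pattern=r'ERROR',
            command=['journalctl', '--unit', 'devstack@q-svc',
                     '--since', '5 minutes ago'])
    except grep.NoMatchingLinesFound:
        lines = []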

View File

@@ -156,12 +156,18 @@ class PortLogsTest(testtools.TestCase):
stack = tobiko.required_setup_fixture(PortLogsStack)
-    LOG_FILENAME = '/var/log/containers/neutron/server.log*'
+    def setUp(self):
+        super(PortLogsTest, self).setUp()
+        os_topology = topology.get_openstack_topology()
+        self.LOG_FILENAME = os_topology.log_names_mappings[neutron.SERVER]
+        self.FILE_DIGGER_CLASS = os_topology.file_digger_class
def test_nova_port_notification(self):
pattern = f'Nova.+event.+response.*{self.stack.server_id}'
-        log_digger = files.MultihostLogFileDigger(filename=self.LOG_FILENAME,
-                                                  sudo=True)
+        log_digger = files.MultihostLogFileDigger(
+            filename=self.LOG_FILENAME,
+            file_digger_class=self.FILE_DIGGER_CLASS,
+            sudo=True)
for node in topology.list_openstack_nodes(group='controller'):
log_digger.add_host(hostname=node.hostname,
ssh_client=node.ssh_client)
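
Because each digger records the matches it has already returned, a later call with new_lines=True yields only the lines that appeared in between. A hedged sketch of that polling pattern (how the rest of this test consumes the digger is outside the hunk shown here):

    log_digger.find_lines(pattern=pattern)  # snapshot pre-existing matches

    # ... trigger the port update under test ...

    # Only lines logged since the snapshot are returned here.
    fresh = log_digger.find_lines(pattern=pattern, new_lines=True)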

View File

@@ -20,6 +20,7 @@ from oslo_log import log
from tobiko.openstack import neutron
from tobiko.openstack import topology
from tobiko.shell import files
from tobiko.tripleo import _overcloud
from tobiko.tripleo import _undercloud
@@ -52,6 +53,17 @@ class TripleoTopology(topology.OpenStackTopology):
    # TODO: add more known subgroups here
known_subgroups: typing.List[str] = ['controller', 'compute']
# In TripleO we need to parse log files directly
file_digger_class = files.LogFileDigger
    # This dict maps each OpenStack service to its log file name (and
    # systemd unit, if needed). The format is:
    #     service_name: (log_filename, systemd_unit_name)
log_names_mappings = {
neutron.SERVER: '/var/log/containers/neutron/server.log*',
}
def discover_nodes(self):
self.discover_undercloud_nodes()
self.discover_overcloud_nodes()
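
Since TripleoTopology overrides both attributes, test code written against the topology API switches transparently between file parsing on TripleO and journal parsing on Devstack. A brief sketch of the dispatch (digger arguments are illustrative):

    from tobiko.openstack import neutron
    from tobiko.openstack import topology

    os_topology = topology.get_openstack_topology()

    # TripleoTopology -> LogFileDigger over the server.log* glob;
    # base OpenStackTopology -> JournalLogDigger over devstack@q-svc.
    digger = os_topology.file_digger_class(
        filename=os_topology.log_names_mappings[neutron.SERVER],
        sudo=True)
    lines = digger.find_lines(pattern='ERROR')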