From 8a16dd537597dc4d5edf912db6d299a685504d00 Mon Sep 17 00:00:00 2001 From: Slawek Kaplonski Date: Wed, 25 Nov 2020 23:57:07 +0100 Subject: [PATCH] Add JournalLogDigger class to parse OpenStack services logs When we use a devstack-based environment, services like Nova or Neutron are logging to journalctl instead of to files. So we need to have some LogDigger class which will be able to parse such logs in the same way as log files. This patch adds such a class and also adds to the topology class parameters which define what LogDigger class should be used for the topology and what the service-name-to-log-file-name mappings are. Change-Id: Ice25bfa0cae60dc2b5402e59f549e2287808ba87 --- tobiko/openstack/neutron/__init__.py | 1 + tobiko/openstack/topology/_topology.py | 13 +++++++++ tobiko/shell/files/__init__.py | 1 + tobiko/shell/files/_logs.py | 34 +++++++++++++++++++--- tobiko/shell/grep.py | 28 ++++++++++++++++++ tobiko/tests/scenario/neutron/test_port.py | 12 ++++++-- tobiko/tripleo/_topology.py | 12 ++++++++ 7 files changed, 94 insertions(+), 7 deletions(-) diff --git a/tobiko/openstack/neutron/__init__.py b/tobiko/openstack/neutron/__init__.py index 153642943..b05036d3e 100644 --- a/tobiko/openstack/neutron/__init__.py +++ b/tobiko/openstack/neutron/__init__.py @@ -22,6 +22,7 @@ from tobiko.openstack.neutron import _port from tobiko.openstack.neutron import _network +SERVER = 'neutron-server' DHCP_AGENT = _agent.DHCP_AGENT L3_AGENT = _agent.L3_AGENT METADATA_AGENT = _agent.METADATA_AGENT diff --git a/tobiko/openstack/topology/_topology.py b/tobiko/openstack/topology/_topology.py index d89f8c31b..443415883 100644 --- a/tobiko/openstack/topology/_topology.py +++ b/tobiko/openstack/topology/_topology.py @@ -25,6 +25,7 @@ from six.moves.urllib import parse import tobiko from tobiko import docker from tobiko import podman +from tobiko.shell import files from tobiko.shell import ip from tobiko.shell import sh from tobiko.shell import ssh @@ -192,6 +193,18 @@ class 
OpenStackTopology(tobiko.SharedFixture): _connections = tobiko.required_setup_fixture( _connection.SSHConnectionManager) + # In Devstack based env logs can be accessed by journalctl + file_digger_class: typing.Type + file_digger_class = files.JournalLogDigger + + # This is dict which handles mapping of the log file and systemd_unit (if + # needed) for the OpenStack services. + # In case of Devstack topology file name in fact name of the systemd unit + # as logs are stored in journalctl + log_names_mappings = { + neutron.SERVER: 'devstack@q-svc', + } + def __init__(self): super(OpenStackTopology, self).__init__() self._names: typing.Dict[str, OpenStackTopologyNode] = ( diff --git a/tobiko/shell/files/__init__.py b/tobiko/shell/files/__init__.py index 7ae6d171d..06acc92c7 100644 --- a/tobiko/shell/files/__init__.py +++ b/tobiko/shell/files/__init__.py @@ -19,4 +19,5 @@ from tobiko.shell.files import _logs LogFileDigger = _logs.LogFileDigger +JournalLogDigger = _logs.JournalLogDigger MultihostLogFileDigger = _logs.MultihostLogFileDigger diff --git a/tobiko/shell/files/_logs.py b/tobiko/shell/files/_logs.py index fc6ec3f8d..879c0d8f4 100644 --- a/tobiko/shell/files/_logs.py +++ b/tobiko/shell/files/_logs.py @@ -57,10 +57,35 @@ class LogFileDigger(object): **self.execute_params) +class JournalLogDigger(LogFileDigger): + + def find_lines(self, pattern, new_lines=False): + dump_log_cmd = [ + "journalctl", "--unit", self.filename, + "--since", "5 minutes ago"] + try: + lines = frozenset( + grep.grep_lines(pattern=pattern, + command=dump_log_cmd, + **self.execute_params)) + except grep.NoMatchingLinesFound: + if new_lines: + return frozenset() + else: + lines -= self.found + self.found.update(lines) + if new_lines: + return lines + return frozenset(self.found) + + class MultihostLogFileDigger(object): - def __init__(self, filename, ssh_clients=None, **execute_params): + def __init__(self, filename, ssh_clients=None, + file_digger_class=LogFileDigger, + **execute_params): 
self.diggers = collections.OrderedDict() + self.file_digger_class = file_digger_class self.filename = filename self.execute_params = execute_params if ssh_clients: @@ -70,9 +95,10 @@ class MultihostLogFileDigger(object): def add_host(self, hostname=None, ssh_client=None): hostname = hostname or sh.get_hostname(ssh_client=ssh_client) if hostname not in self.diggers: - self.diggers[hostname] = LogFileDigger(filename=self.filename, - ssh_client=ssh_client, - **self.execute_params) + self.diggers[hostname] = self.file_digger_class( + filename=self.filename, + ssh_client=ssh_client, + **self.execute_params) def find_lines(self, pattern, new_lines=False): lines = [] diff --git a/tobiko/shell/grep.py b/tobiko/shell/grep.py index fedb18c28..3aa22e7c3 100644 --- a/tobiko/shell/grep.py +++ b/tobiko/shell/grep.py @@ -15,6 +15,7 @@ # under the License. from __future__ import absolute_import +import re import typing # noqa import tobiko @@ -56,3 +57,30 @@ def grep_files(pattern: str, raise NoMatchingLinesFound(pattern=pattern, files=files, login=ssh_client and ssh_client.login or None) + + +def grep_lines(pattern: str, + command: sh.ShellCommandType, + ssh_client: ssh.SSHClientFixture = None, + **execute_params) -> typing.List[str]: + if not pattern: + raise ValueError("Pattern string can't be empty") + command_line = sh.shell_command(command) + try: + result = sh.execute(command_line, + ssh_client=ssh_client, + **execute_params) + except sh.ShellCommandFailed as ex: + if ex.exit_status > 1: + # Some unknown problem occurred + raise + else: + output_lines: typing.List[str] = [] + r = re.compile(pattern) + output_lines = [line for line in result.stdout.splitlines() + if r.search(line)] + if output_lines: + return output_lines + raise NoMatchingLinesFound(pattern=pattern, + files=command, + login=ssh_client and ssh_client.login or None) diff --git a/tobiko/tests/scenario/neutron/test_port.py b/tobiko/tests/scenario/neutron/test_port.py index c3919d6ff..04bb37288 100644 --- 
a/tobiko/tests/scenario/neutron/test_port.py +++ b/tobiko/tests/scenario/neutron/test_port.py @@ -156,12 +156,18 @@ class PortLogsTest(testtools.TestCase): stack = tobiko.required_setup_fixture(PortLogsStack) - LOG_FILENAME = '/var/log/containers/neutron/server.log*' + def setUp(self): + super(PortLogsTest, self).setUp() + os_topology = topology.get_openstack_topology() + self.LOG_FILENAME = os_topology.log_names_mappings[neutron.SERVER] + self.FILE_DIGGER_CLASS = os_topology.file_digger_class def test_nova_port_notification(self): pattern = f'Nova.+event.+response.*{self.stack.server_id}' - log_digger = files.MultihostLogFileDigger(filename=self.LOG_FILENAME, - sudo=True) + log_digger = files.MultihostLogFileDigger( + filename=self.LOG_FILENAME, + file_digger_class=self.FILE_DIGGER_CLASS, + sudo=True) for node in topology.list_openstack_nodes(group='controller'): log_digger.add_host(hostname=node.hostname, ssh_client=node.ssh_client) diff --git a/tobiko/tripleo/_topology.py b/tobiko/tripleo/_topology.py index 028f8c3d4..addd3f58f 100644 --- a/tobiko/tripleo/_topology.py +++ b/tobiko/tripleo/_topology.py @@ -20,6 +20,7 @@ from oslo_log import log from tobiko.openstack import neutron from tobiko.openstack import topology +from tobiko.shell import files from tobiko.tripleo import _overcloud from tobiko.tripleo import _undercloud @@ -52,6 +53,17 @@ class TripleoTopology(topology.OpenStackTopology): # TODO: add more known subgrups here known_subgroups: typing.List[str] = ['controller', 'compute'] + # In TripleO we need to parse log files directly + file_digger_class = files.LogFileDigger + + # This is dict which handles mapping of the log file and systemd_unit (if + # needed) for the OpenStack services + # Format of this dict is like below: + # service_name: (log_filename, systemd_unit_name) + log_names_mappings = { + neutron.SERVER: '/var/log/containers/neutron/server.log*', + } + def discover_nodes(self): self.discover_undercloud_nodes() 
self.discover_overcloud_nodes()