Enable ansible-test units

- converts the lonely unit test to use the official unittest format
  for ansible collections.
- adds two tests to the sova module
- moves sova_lib into the sova module, as this is required to
  make the module compatible with both role and collection deployments.

Story: TRIPLEOCI-284
Change-Id: I6e0b2fa4a4b02fbf4133c28d29adaf0e3c16d344
Sorin Sbarnea 2021-02-11 12:37:06 +00:00 committed by Sorin Sbârnea
parent 51160038a5
commit a5ce785e24
10 changed files with 211 additions and 236 deletions
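
Before the diffs, a condensed, hypothetical sketch of the dual-import guard the converted tests rely on so that they stay loadable both under ansible-test (collection layout) and under plain pytest from a role checkout; the names are taken from the test files changed below, and the block itself is not part of the commit:

# Illustrative only; a condensed form of the guard used in tests/unit/ below.
try:
    # ansible-test runs the tests from an installed collection tree.
    from ansible_collections.tripleo.collect_logs.plugins.modules import sova
except ImportError:
    # Plain "pytest" from a role checkout: put plugins/ on sys.path instead.
    import os
    import sys

    plugins_path = os.path.realpath(
        os.path.join(os.path.dirname(__file__), "../../plugins")
    )
    sys.path.append(os.path.join(plugins_path, "modules"))
    import sova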


@@ -1,106 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function

import gzip
import logging

try:
    import regex as regex_module
except ImportError:
    import re as regex_module

__metaclass__ = type

logging.basicConfig(
    format=(
        "%(asctime)s - %(name)s - %(levelname)s - "
        "%(module)s.%(funcName)s:%(lineno)d - %(message)s"
    )
)
log = logging.getLogger("parser")
log.setLevel(logging.ERROR)


class Pattern(object):
    def __init__(self, data):
        self.data = data
        self.load_yaml()
        self.setup_regexes()
        self.setup_patterns()

    def load_yaml(self):
        import yaml

        if isinstance(self.data, dict):
            self.config = self.data
        else:
            self.config = yaml.safe_load(self.data)

    def setup_regexes(self):
        self.regexes = {}
        if self.config:
            for regexp in self.config.get("regexes", []):
                flags = []
                if regexp.get("multiline"):
                    flags.append(regex_module.MULTILINE)
                self.regexes[regexp.get("name")] = regex_module.compile(
                    r"{0}".format(regexp.get("regex")), *flags
                )

    def setup_patterns(self):
        self._patterns = self.config.get("patterns", {})
        if self._patterns:
            for key in self._patterns:
                for p in self._patterns[key]:
                    if p["pattern"] in self.regexes:
                        p["pattern"] = self.regexes[p["pattern"]]
                    if p["logstash"] in self.regexes:
                        p["logstash"] = self.regexes[p["logstash"]]

    @property
    def patterns(self):
        return self._patterns


def line_match(pat, line, exclude=None):
    if isinstance(pat, str):
        return pat in line
    found = pat.search(line)
    if not found:
        return False
    if found.groups():
        if exclude:
            if any([i in found.group(1) for i in exclude]):
                return False
        return found.group(1)
    return True


def parse(text_file, patterns):
    ids = []
    msgs = []
    if text_file.split(".")[-1] == "gz":
        open_func = gzip.open
    else:
        open_func = open
    with open_func(text_file, "rt") as finput:
        text = finput.read()
    for p in patterns:
        line_matched = line_match(p["pattern"], text, exclude=p.get("exclude"))
        if line_matched:
            log.debug("Found pattern %s in file %s", repr(p), text_file)
            ids.append(p["id"])
            msgs.append(p["msg"].format(line_matched))
    return list(set(ids)), list(set(msgs))


@@ -3,11 +3,15 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type

import json

from unittest.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes

try:
    from unittest.mock import patch
except ImportError:
    from mock import patch  # old pythons


def set_module_args(**args):
    if "_ansible_remote_tmp" not in args:

@@ -108,11 +108,102 @@ file_written:
    sample: '/var/log/result_file'
"""

import gzip  # noqa: E402
import logging  # noqa: E402
import os  # noqa: E402
from copy import deepcopy  # noqa: E402

from ansible.module_utils.basic import AnsibleModule  # noqa: E402

try:
    import regex as regex_module
except ImportError:
    import re as regex_module

__metaclass__ = type

logging.basicConfig(
    format=(
        "%(asctime)s - %(name)s - %(levelname)s - "
        "%(module)s.%(funcName)s:%(lineno)d - %(message)s"
    )
)
log = logging.getLogger("parser")
log.setLevel(logging.ERROR)


class Pattern(object):
    def __init__(self, data):
        self.data = data
        self.load_yaml()
        self.setup_regexes()
        self.setup_patterns()

    def load_yaml(self):
        import yaml

        if isinstance(self.data, dict):
            self.config = self.data
        else:
            self.config = yaml.safe_load(self.data)

    def setup_regexes(self):
        self.regexes = {}
        if self.config:
            for regexp in self.config.get("regexes", []):
                flags = []
                if regexp.get("multiline"):
                    flags.append(regex_module.MULTILINE)
                self.regexes[regexp.get("name")] = regex_module.compile(
                    r"{0}".format(regexp.get("regex")), *flags
                )

    def setup_patterns(self):
        self._patterns = self.config.get("patterns", {})
        if self._patterns:
            for key in self._patterns:
                for p in self._patterns[key]:
                    if p["pattern"] in self.regexes:
                        p["pattern"] = self.regexes[p["pattern"]]
                    if p["logstash"] in self.regexes:
                        p["logstash"] = self.regexes[p["logstash"]]

    @property
    def patterns(self):
        return self._patterns


def line_match(pat, line, exclude=None):
    if isinstance(pat, str):
        return pat in line
    found = pat.search(line)
    if not found:
        return False
    if found.groups():
        if exclude:
            if any([i in found.group(1) for i in exclude]):
                return False
        return found.group(1)
    return True


def parse(text_file, patterns):
    ids = []
    msgs = []
    if text_file.split(".")[-1] == "gz":
        open_func = gzip.open
    else:
        open_func = open
    with open_func(text_file, "rt") as finput:
        text = finput.read()
    for p in patterns:
        line_matched = line_match(p["pattern"], text, exclude=p.get("exclude"))
        if line_matched:
            log.debug("Found pattern %s in file %s", repr(p), text_file)
            ids.append(p["id"])
            msgs.append(p["msg"].format(line_matched))
    return list(set(ids)), list(set(msgs))


def format_msg_filename(text):
    for s in (
@@ -149,9 +240,6 @@ def main():
        module.exit_json(**results)
    dict_patterns = deepcopy(module.params["config"])
    # from sova_lib import Pattern, parse
    from ansible.module_utils.sova_lib import Pattern, parse
    pattern = Pattern(dict_patterns)
    PATTERNS = pattern.patterns
    for name in module.params["files"]:
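
Since Pattern and parse now live inside the sova module itself, they can be exercised directly. A minimal, hypothetical driver follows (not part of the diff; the log file name is made up, and the import assumes the collection layout used by the tests below):

# Illustrative only; the dict mirrors the "config" parameter the sova module
# deep-copies and hands to Pattern() in main() above.
from ansible_collections.tripleo.collect_logs.plugins.modules.sova import (
    Pattern,
    parse,
)

config = {
    "regexes": [{"regex": "Stack overcloud CREATE_FAILED", "name": "stack_failed"}],
    "patterns": {
        "console": [
            {
                "id": 10,
                "logstash": "",
                "msg": "Overcloud stack installation: FAILED.",
                "pattern": "stack_failed",  # replaced by the compiled regex
                "tag": "info",
            }
        ]
    },
}

pattern = Pattern(config)  # compiles "regexes", resolves "patterns"
ids, msgs = parse("console.log", pattern.patterns["console"])
# -> ([10], ["Overcloud stack installation: FAILED."]) when the hypothetical
#    console.log contains a CREATE_FAILED line.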


@@ -1,106 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function

import gzip
import logging

import yaml

try:
    import regex as regex_module
except ImportError:
    import re as regex_module

__metaclass__ = type

logging.basicConfig(
    format=(
        "%(asctime)s - %(name)s - %(levelname)s - "
        "%(module)s.%(funcName)s:%(lineno)d - %(message)s"
    )
)
log = logging.getLogger("parser")
log.setLevel(logging.ERROR)


class Pattern(object):
    def __init__(self, data):
        self.data = data
        self.load_yaml()
        self.setup_regexes()
        self.setup_patterns()

    def load_yaml(self):
        if isinstance(self.data, dict):
            self.config = self.data
        else:
            self.config = yaml.safe_load(self.data)

    def setup_regexes(self):
        self.regexes = {}
        if self.config:
            for regexp in self.config.get("regexes", []):
                flags = []
                if regexp.get("multiline"):
                    flags.append(regex_module.MULTILINE)
                self.regexes[regexp.get("name")] = regex_module.compile(
                    r"{0}".format(regexp.get("regex")), *flags
                )

    def setup_patterns(self):
        self._patterns = self.config.get("patterns", {})
        if self._patterns:
            for key in self._patterns:
                for p in self._patterns[key]:
                    if p["pattern"] in self.regexes:
                        p["pattern"] = self.regexes[p["pattern"]]
                    if p["logstash"] in self.regexes:
                        p["logstash"] = self.regexes[p["logstash"]]

    @property
    def patterns(self):
        return self._patterns


def line_match(pat, line, exclude=None):
    if isinstance(pat, str):
        return pat in line
    found = pat.search(line)
    if not found:
        return False
    if found.groups():
        if exclude:
            if any([i in found.group(1) for i in exclude]):
                return False
        return found.group(1)
    return True


def parse(text_file, patterns):
    ids = []
    msgs = []
    if text_file.split(".")[-1] == "gz":
        open_func = gzip.open
    else:
        open_func = open
    with open_func(text_file, "rt") as finput:
        text = finput.read()
    for p in patterns:
        line_matched = line_match(p["pattern"], text, exclude=p.get("exclude"))
        if line_matched:
            log.debug("Found pattern %s in file %s", repr(p), text_file)
            ids.append(p["id"])
            msgs.append(p["msg"].format(line_matched))
    return list(set(ids)), list(set(msgs))


@@ -0,0 +1,3 @@
pytest
pytest-mock
mock; python_version < '3.3'


@@ -1,16 +1,36 @@
from __future__ import absolute_import, division, print_function

import os
import sys

import pytest  # noqa
import pytest
import yaml

from common.utils import (
    AnsibleExitJson,
    AnsibleFailJson,
    ModuleTestCase,
    set_module_args,
)

try:
    # ansible-test style imports
    from ansible_collections.tripleo.collect_logs.plugins.module_utils.test_utils import (
        AnsibleExitJson,
        AnsibleFailJson,
        ModuleTestCase,
        set_module_args,
    )
    from ansible_collections.tripleo.collect_logs.plugins.modules import (
        flatten_nested_dict,
    )
except ImportError:
    # avoid collection errors running: pytest --collect-only
    import os
    import sys

    plugins_path = os.path.join(os.path.dirname(__file__), "../../plugins/")
    plugins_path = os.path.realpath(plugins_path)
    sys.path.append("%s/%s" % (plugins_path, "module_utils"))
    sys.path.append("%s/%s" % (plugins_path, "modules"))
    import flatten_nested_dict
    from test_utils import (
        AnsibleExitJson,
        AnsibleFailJson,
        ModuleTestCase,
        set_module_args,
    )
__metaclass__ = type
SAMPLE_INPUT_1 = """
@@ -29,12 +49,6 @@ data:
group: system
"""

# Temporary hack until we adopt official ansible-test unit-testing
dir = os.path.join(os.path.dirname(__file__), "../roles/collect_logs/library")
sys.path.append(dir)
print(dir)
import flatten_nested_dict  # noqa: E402


class TestFlattenNestedDict(ModuleTestCase):
    def test_invalid_args(self):

tests/unit/test_sova.py (new file, 68 lines)

@@ -0,0 +1,68 @@
from __future__ import absolute_import, division, print_function

import pytest

try:
    # ansible-test style imports
    from ansible_collections.tripleo.collect_logs.plugins.module_utils.test_utils import (
        AnsibleExitJson,
        AnsibleFailJson,
        ModuleTestCase,
        set_module_args,
    )
    from ansible_collections.tripleo.collect_logs.plugins.modules import sova
except ImportError:
    # avoid collection errors running: pytest --collect-only
    import os
    import sys

    plugins_path = os.path.join(os.path.dirname(__file__), "../../plugins/")
    plugins_path = os.path.realpath(plugins_path)
    sys.path.append("%s/%s" % (plugins_path, "module_utils"))
    sys.path.append("%s/%s" % (plugins_path, "modules"))
    import sova
    from test_utils import (
        AnsibleExitJson,
        AnsibleFailJson,
        ModuleTestCase,
        set_module_args,
    )

__metaclass__ = type


class TestFlattenNestedDict(ModuleTestCase):
    def test_invalid_args(self):
        set_module_args(
            data="invalid",
        )
        with pytest.raises(AnsibleFailJson) as context:
            sova.main()
        assert context.value.args[0]["failed"] is True
        assert "msg" in context.value.args[0]

    def test_min(self):
        set_module_args(
            # just a file that exists on almost any platform
            config={
                "regexes": [{"regex": "127.0.0.1", "name": "hosts"}],
                "patterns": {
                    "console": [
                        {
                            "id": 1,
                            "logstash": "",
                            "msg": "Overcloud stack installation: SUCCESS.",
                            "pattern": "Stack overcloud CREATE_COMPLETE",
                            "tag": "info",
                        }
                    ]
                },
            },
            files={"console": "/etc/hosts"},
        )
        with pytest.raises(AnsibleExitJson) as context:
            sova.main()
        assert context.value.args[0]["changed"] is True
        assert context.value.args[0]["processed_files"] == ["/etc/hosts"]
        assert "message" in context.value.args[0]
        assert context.value.args[0]["tags"] == []
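
A note on test_min above: the literal pattern "Stack overcloud CREATE_COMPLETE" is not expected to appear in /etc/hosts, so the test appears to exercise the module end to end rather than an actual match (hence tags == []). A hypothetical variant, reusing the same sova import, that does trigger the regex substitution in Pattern.setup_patterns() and a real match could look like:

# Illustrative only; not part of the commit. Point the pattern at the named
# "hosts" regex so setup_patterns() swaps in the compiled expression and
# parse() matches the loopback line in /etc/hosts.
config = {
    "regexes": [{"regex": "127.0.0.1", "name": "hosts"}],
    "patterns": {
        "console": [
            {
                "id": 1,
                "logstash": "",
                "msg": "Loopback entry present.",
                "pattern": "hosts",
                "tag": "info",
            }
        ]
    },
}
pattern = sova.Pattern(config)
ids, msgs = sova.parse("/etc/hosts", pattern.patterns["console"])
assert ids == [1]  # holds on hosts files that contain a 127.0.0.1 entry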


@@ -4,7 +4,7 @@ minversion = 3.4.0
envlist = docs, linters, molecule
skipdist = True
requires =
    tox-ansible >= 1.0.3
    tox-ansible >= 1.0.5

[testenv]
usedevelop = True
@@ -55,15 +55,15 @@ setenv =
    ANSIBLE_SHOW_CUSTOM_STATS=1
    PYTHONPATH={env:PYTHONPATH:}:library
    # failsafe mechanism to avoid accidental disablement of tests
    # 3 molecule + 3 unit = 6
    PYTEST_REQPASS=6
    # 3 molecule tests expected
    PYTEST_REQPASS=3
deps =
    ansible>=2.9,<2.10
    molecule[test,docker]>=3.2.2,<3.3  # MIT
    pytest-molecule>=1.3.4
    pytest-plus  # provides support for PYTEST_REQPASS
commands =
    python -m pytest --color=yes --html={envlogdir}/reports.html --self-contained-html {tty:-s} {posargs}
    python -m pytest --color=yes --html={envlogdir}/reports.html --self-contained-html {tty:-s} -k molecule {posargs}

[testenv:ansible]
[testenv:ansible]


@@ -8,6 +8,15 @@
      # we want to run sanity only on py36 instead of implicit 2.6-3.9 range
      tox_extra_args: -- --python 3.6

- job:
    name: tox-ansible-test-units
    description: Runs ansible-test units (tox -e units)
    parent: tox
    vars:
      tox_envlist: units  # dynamic tox env added by tox-ansible
      # we want to run sanity only on py36 instead of implicit 2.6-3.9 range
      # tox_extra_args: -- --python 3.6

- job:
    name: zuul-ansible-role-collect-logs
    description: Validate that zuul can use that role.
@@ -28,6 +37,7 @@
        - openstack-tox-linters
        - openstack-tox-molecule
        - tox-ansible-test-sanity
        - tox-ansible-test-units
        # Limit the number of jobs executed while still assuring a relevant
        # level of coverage. If specific tasks are to be tested we should
        # consider implementing functional tests for them, especially as