Add script to convert queries to sova format

- Add a few sova regexes to queries.
- Update the requirements file to include PyYAML, which the converter script uses.
- Add a converter script that:
  - parses queries.yml;
  - if an entry has a regex, builds the sova query from that regex; if the entry
    has a pattern but no regex, escapes the pattern string into a regex and
    builds the sova query from that;
  - writes the sova output to sova-pattern-generated.json (so as not to
    overwrite the existing sova_pattern.json for now).

Change-Id: Iae3df0d901f2b25bdb27f1b3bea5042f82128dcf

parent 07a410c9b8
commit 146d3d7fff
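
To make the conversion rule above concrete, here is a rough sketch (the helper
name to_sova_regex is hypothetical and not part of the commit): an entry that
already carries a regex is passed through unchanged, while a pattern-only entry
is escaped with re.escape so its literal text becomes a valid regular expression.

import re


def to_sova_regex(entry):
    """Hypothetical helper mirroring the conversion rule described above."""
    if "regex" in entry:
        # The entry already provides a regex: use it as-is.
        return {"name": entry["id"], "regex": entry["regex"]}
    # Pattern-only entry: escape the literal text so it is a valid regex.
    pattern = entry["pattern"]
    if isinstance(pattern, list):
        return {"name": entry["id"], "regex": [re.escape(p) for p in pattern]}
    return {"name": entry["id"], "regex": re.escape(pattern)}


# e.g. the pattern-only overcloud_create_failed entry from queries.yml:
print(to_sova_regex({"id": "overcloud_create_failed",
                     "pattern": "Stack overcloud CREATE_FAILED"}))
# -> {'name': 'overcloud_create_failed', 'regex': 'Stack\\ overcloud\\ CREATE_FAILED'},
#    matching the entry in output/sova-pattern-generated.json below.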

4  ansible.cfg  Normal file

@@ -0,0 +1,4 @@
[defaults]
interpreter_python=auto_silent
inventory = hosts
nocows = 0

@@ -1 +1,4 @@
 jq
+# required by sova ansible module (not inside the venv)
+python3-yaml [platform:ubuntu]
+python3-pyyaml [platform:rpm !platform:rhel-7 !platform:centos-7]

@@ -85,8 +85,17 @@
         },
         "regex": {
             "title": "Regex",
-            "default": false,
-            "type": "boolean"
+            "anyOf": [
+                {
+                    "type": "array",
+                    "items": {
+                        "type": "string"
+                    }
+                },
+                {
+                    "type": "string"
+                }
+            ]
         },
         "multiline": {
             "title": "Multiline",
@@ -103,8 +112,7 @@
         }
     },
     "required": [
-        "id",
-        "pattern"
+        "id"
     ],
     "additionalProperties": false
 }

56  output/sova-pattern-generated.json  Normal file

@@ -0,0 +1,56 @@
{
    "patterns": {
        "console": [
            {
                "id": "java_io_exception_remote_call",
                "logstash": "",
                "msg": "java_io_exception_remote_call",
                "pattern": "java_io_exception_remote_call",
                "tag": "info"
            },
            {
                "id": "overcloud_create_failed",
                "logstash": "",
                "msg": "overcloud_create_failed",
                "pattern": "overcloud_create_failed",
                "tag": "info"
            },
            {
                "id": "timeout_re",
                "logstash": "",
                "msg": "timeout_re",
                "pattern": "timeout_re",
                "tag": "info"
            },
            {
                "id": "curl_re",
                "logstash": "",
                "msg": "curl_re",
                "pattern": "curl_re",
                "tag": "info"
            }
        ]
    },
    "regexes": [
        {
            "name": "java_io_exception_remote_call",
            "regex": [
                "java\\.io\\.IOException",
                "Remote\\ call\\ on",
                "failed"
            ]
        },
        {
            "name": "overcloud_create_failed",
            "regex": "Stack\\ overcloud\\ CREATE_FAILED"
        },
        {
            "name": "timeout_re",
            "regex": "Killed\\s+timeout -s 9"
        },
        {
            "name": "curl_re",
            "regex": "curl. .*? couldn\\\\'t open file \"(.*?)\""
        }
    ]
}

35  playbooks/sova.yml  Normal file

@@ -0,0 +1,35 @@
---
- name: Validate that sova can parse what we produce
  hosts: localhost
  connection: local
  gather_facts: true
  collections:
    - tripleo.collect_logs
  vars:
    sova_cfg_file: "{{ lookup('file', playbook_dir + '/../output/sova-pattern-generated.json') }}"
    samples_dir: "{{ (playbook_dir, '../samples/') | path_join | realpath }}"
    samples_log: "{{ samples_dir }}/errors-testing.err"
  tasks:
    - name: Display file to be tested
      debug:
        var: sova_cfg_file

    - name: Run sova task
      sova:
        config: "{{ sova_cfg_file }}"
        files:
          console: "{{ samples_log }}"
        result: "sova.log"
        result_file_dir: "{{ (playbook_dir, '../output') | path_join | realpath }}"
      register: result

    - name: Display sova result
      debug:
        var: result

    - name: Fail if unexpected result is detected
      fail:
        msg: "Unexpected result: {{ result }}"
      when: >
        samples_log not in result.processed_files
        or result.file_written != 'sova.log'

@@ -1,3 +1,4 @@
+ansible-base>=2.10 # GPL
 pydantic>=1.7.4 # MIT
 yq # Apache
 jsonschema # MIT

@@ -4,19 +4,36 @@
 #
 # pip-compile --output-file=requirements.txt requirements.in
 #
+ansible-base==2.10.8
+    # via -r requirements.in
 argcomplete==1.12.2
     # via yq
 attrs==20.3.0
     # via jsonschema
+cffi==1.14.5
+    # via cryptography
+cryptography==3.4.7
+    # via ansible-base
+jinja2==2.11.3
+    # via ansible-base
 jsonschema==3.2.0
     # via -r requirements.in
+markupsafe==1.1.1
+    # via jinja2
+packaging==20.9
+    # via ansible-base
+pycparser==2.20
+    # via cffi
 pydantic==1.8.1
     # via -r requirements.in
+pyparsing==2.4.7
+    # via packaging
 pyrsistent==0.17.3
     # via jsonschema
 pyyaml==5.4.1
     # via
     #   -r requirements.in
+    #   ansible-base
     #   yq
 six==1.15.0
     # via jsonschema

4  requirements.yml  Normal file

@@ -0,0 +1,4 @@
collections:
  - name: git+https://opendev.org/openstack/ansible-role-collect-logs
    type: git
    version: master

3  samples/errors-testing.err  Normal file

@@ -0,0 +1,3 @@
couldn't open file
Stack overcloud CREATE_FAILED
AnsibleUndefinedVariable

@@ -16,5 +16,7 @@ queries:
   pattern: "Stack overcloud CREATE_FAILED"
 # from https://opendev.org/opendev/elastic-recheck/src/branch/master/queries/1260654.yaml
 - id: timeout_re
-  pattern: Killed\s+timeout -s 9
-  regex: true
+  regex: 'Killed\s+timeout -s 9'
+- id: curl_re
+  pattern: "couldn't open file"
+  regex: 'curl. .*? couldn\\''t open file "(.*?)"'

@@ -7,7 +7,7 @@ from pydantic import BaseModel, Field, Extra, HttpUrl
 class Query(BaseModel):
     id: str
     name: Optional[str]
-    pattern: Union[List[str], str]
+    pattern: Optional[Union[List[str], str]]

     category: Optional[str]
     url: Optional[Union[List[HttpUrl], HttpUrl]]
@@ -19,7 +19,7 @@ class Query(BaseModel):
         description="Used for elastic-recheck")

     # artcl/sove specific fields
-    regex: Optional[bool] = False
+    regex: Optional[Union[List[str], str]]
     # https://opendev.org/openstack/ansible-role-collect-logs/src/branch/master/vars/sova-patterns.yml#L47
     multiline: Optional[bool] = False
     files: Optional[List[str]] = Field(
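
As a side note on the Query model change above, a minimal sketch (assuming
pydantic 1.x, as pinned in requirements.txt; only the fields touched by this
diff are reproduced) of the shapes the relaxed fields now accept:

from typing import List, Optional, Union

from pydantic import BaseModel


class Query(BaseModel):
    id: str
    pattern: Optional[Union[List[str], str]]  # now optional
    regex: Optional[Union[List[str], str]]    # was Optional[bool] = False


# A regex-only entry (no pattern) and a list-valued regex both validate now.
print(Query(id="timeout_re", regex=r"Killed\s+timeout -s 9"))
print(Query(id="java_io_exception_remote_call",
            regex=["java\\.io\\.IOException", "Remote\\ call\\ on", "failed"]))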

72  src/sova-converter.py  Normal file

@@ -0,0 +1,72 @@
import json
import os
import yaml
import re

dir_path = os.path.dirname(os.path.realpath(__file__))
# Source and destination files
queries_src = os.path.join(dir_path, 'data', 'queries.yml')
sova_dest = os.path.join(os.path.dirname(dir_path), 'output', 'sova-pattern-generated.json')
# elastic_recheck_dest = os.path.join(os.path.dirname(dir_path), 'output', 'elastic-recheck-pattern-generated.json')

with open(queries_src) as in_file:
    queries_list = yaml.load(in_file, Loader=yaml.BaseLoader)

sova_regex_list = []
sova_patterns_list = {"console": []}
sova_dict = {
    'regexes': sova_regex_list,
    'patterns': sova_patterns_list
}

for query_dict in queries_list['queries']:
    regex_dict = {}
    regex_str = query_dict.get("regex", "")

    if {'regex', 'pattern'} <= query_dict.keys():
        # No pattern/regex conversion
        regex_dict = {
            "name": query_dict["id"],
            "regex": query_dict["regex"]
        }
        # copy elastic_recheck pattern from query_dict pattern
    elif 'regex' in query_dict:
        # Convert regex to pattern for ER
        regex_dict = {
            "name": query_dict["id"],
            "regex": query_dict["regex"]
        }
        # form elastic_recheck pattern from query_dict regex
    elif 'pattern' in query_dict:
        # Convert pattern to regex for Sova
        if isinstance(query_dict["pattern"], str):
            generated_regex = re.escape(query_dict["pattern"])
            regex_dict = {
                "name": query_dict["id"],
                "regex": generated_regex
            }
            regex_str = generated_regex
        elif isinstance(query_dict["pattern"], list):
            generated_regex = []
            for item in query_dict["pattern"]:
                generated_regex.append(re.escape(item))
            regex_dict = {
                "name": query_dict["id"],
                "regex": generated_regex
            }
        # copy elastic_recheck pattern from query_dict pattern
    sova_regex_list.append(regex_dict)
    patterns = sova_patterns_list.get("console", list())
    # if regex_str:
    patterns.append({
        "id": query_dict['id'],
        "logstash": "",
        "msg": query_dict['id'],
        "pattern": regex_dict['name'],
        "tag": "info"
    })

with open(sova_dest, 'w') as out_file:
    json.dump(sova_dict, out_file, indent=4, sort_keys=True)
    # Adds newline to pass lint
    out_file.write("\n")
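
Once the converter has run (python3 src/sova-converter.py, as wired into
tox.ini below), the generated file can be sanity-checked before the playbook
consumes it. A throwaway check, not part of the commit, assuming it is run
from the repository root:

import json

with open("output/sova-pattern-generated.json") as f:
    data = json.load(f)

# The generated config carries a "regexes" list plus a "patterns" mapping,
# and every console pattern should reference a regex of the same name.
assert set(data) == {"patterns", "regexes"}
regex_names = {r["name"] for r in data["regexes"]}
pattern_ids = {p["id"] for p in data["patterns"]["console"]}
assert pattern_ids <= regex_names
print("ok: %d regexes, %d console patterns" % (len(regex_names), len(pattern_ids)))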

5  tox.ini

@@ -19,6 +19,9 @@ deps =
 commands =
     python3 src/model.py
     bash -c "cat src/data/queries.yml | yq | jsonschema -i /dev/stdin output/queries-schema.json"
+    python3 src/sova-converter.py
+    ansible-galaxy collection install -r requirements.yml
+    ansible-playbook playbooks/sova.yml
 passenv =
     CURL_CA_BUNDLE # https proxies, https://github.com/tox-dev/tox/issues/1437
     FORCE_COLOR
@@ -27,6 +30,7 @@ passenv =
     PYTEST_* # allows developer to define their own preferences
     PY_COLORS
     REQUESTS_CA_BUNDLE # https proxies
+    TERM
     SSL_CERT_FILE # https proxies
 # recreate = True
 setenv =
@@ -35,6 +39,7 @@ setenv =
 skip_install = true
 allowlist_externals =
     bash
+    cat

 [testenv:deps]
 description = Update dependency lock files