Add black, isort and flake8

All changes included were either made by black
and isort or requested by flake8.

Change-Id: Ie7998d722ea4a7d4106d106c1476b3f1255d656a
Sorin Sbarnea 2021-04-22 12:29:27 +01:00
parent 939ca8ac23
commit 3cfc6361f4
6 changed files with 137 additions and 65 deletions

.flake8 (new file, 45 additions)

@@ -0,0 +1,45 @@
[flake8]
# Don't even try to analyze these:
exclude =
# No need to traverse egg files
*.egg,
# No need to traverse egg info dir
*.egg-info,
# No need to traverse eggs directory
.eggs,
# No need to traverse our git directory
.git,
# GitHub configs
.github,
# Cache files of MyPy
.mypy_cache,
# Cache files of pytest
.pytest_cache,
# Temp dir of pytest-testmon
.tmontmp,
# Countless third-party libs in venvs
.tox,
# Occasional virtualenv dir
.venv,
# VS Code
.vscode,
# There's no value in checking cache directories
__pycache__,
# Temporary build dir
build,
# This contains sdists and wheels of ansible-lint that we don't want to check
dist,
# Occasional virtualenv dir
env,
# Metadata of `pip wheel` cmd is autogenerated
pip-wheel-metadata,
# Let's not overcomplicate the code:
max-complexity = 10
# black
max-line-length = 88
# https://black.readthedocs.io/en/stable/the_black_code_style.html#line-length
ignore = E203,E501,W503
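
For context, the three ignored codes are the ones that conflict with black's output: E203 fires on the spaces black puts around ":" in slices, W503 fires on black's line breaks before binary operators, and E501 is handled by black's own 88-column limit. A small self-contained illustration (invented example, not from this repo) of the E203 case:

# Black formats slices with complex bounds using spaces around ":";
# flake8's E203 ("whitespace before ':'") would report this line.
ham = list(range(20))
lower, upper, offset = 2, 10, 1
print(ham[lower + offset : upper + offset])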

.pre-commit-config.yaml

@@ -12,7 +12,7 @@ repos:
- id: debug-statements
language_version: python3
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.25.0
rev: v1.26.1
hooks:
- id: yamllint
files: \.(yaml|yml)$
@@ -23,3 +23,24 @@ repos:
files: src\/data\/queries\.yml$
entry: >
yamllint --strict -d "rules: { key-ordering: enable }"
- repo: https://github.com/pre-commit/mirrors-isort
rev: v5.8.0
hooks:
- id: isort
args:
# https://github.com/pre-commit/mirrors-isort/issues/9#issuecomment-624404082
- --filter-files
- repo: https://github.com/python/black.git
rev: 20.8b1
hooks:
- id: black
language_version: python3
- repo: https://github.com/pycqa/flake8.git
rev: 3.9.1
hooks:
- id: flake8
language_version: python3
additional_dependencies:
- flake8-2020>=1.6.0
- flake8-docstrings>=1.5.0
- flake8-pytest-style>=1.2.2
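
With these three hooks appended, running "pre-commit run --all-files" reproduces the isort/black/flake8 pass described in the commit message, and "pre-commit install" wires the same checks into every future commit.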

(JSON schema for the queries file)

@@ -1,5 +1,6 @@
{
"title": "Queries",
"description": "Queries Model.",
"type": "object",
"properties": {
"queries": {
@@ -16,6 +17,7 @@
"definitions": {
"Query": {
"title": "Query",
"description": "Query Model.",
"type": "object",
"properties": {
"id": {

(Elastic-Recheck generator script)

@@ -1,22 +1,23 @@
import json
import os
import yaml
import re
"""Generate Elastic-Recheck compatible yaml files from human readable queries.
"""
This script generates Elastic-Recheck compatible yaml files from human readable queries.
It takes 'pattern' and 'tags', both of which can be strings or lists, from input file and forms
an elastic recheck query. This query is written in a file with filename same as the bug number if bug URL is provided
It takes 'pattern' and 'tags', both of which can be strings or lists,
from input file and forms an elastic recheck query. This query is written in a
file with filename same as the bug number if bug URL is provided
or else the id from input file.
input: src/data/queries.yml
output: output/elastic-recheck/<id/bug_no>.yaml
"""
import os
import yaml
dir_path = os.path.dirname(os.path.realpath(__file__))
# Source and destination files
queries_src = os.path.join(dir_path, 'data', 'queries.yml')
elastic_recheck_dest_dir = os.path.join(os.path.dirname(dir_path), 'output', 'elastic-recheck')
queries_src = os.path.join(dir_path, "data", "queries.yml")
elastic_recheck_dest_dir = os.path.join(
os.path.dirname(dir_path), "output", "elastic-recheck"
)
# Make sure dest dir for er is present
if not os.path.exists(elastic_recheck_dest_dir):
os.makedirs(elastic_recheck_dest_dir)
@@ -24,14 +25,14 @@ if not os.path.exists(elastic_recheck_dest_dir):
with open(queries_src) as in_file:
queries_list = yaml.load(in_file, Loader=yaml.BaseLoader)
for query_dict in queries_list['queries']:
for query_dict in queries_list["queries"]:
if "pattern" not in query_dict:
continue
# Assuming "pattern" is always present if it needs to be shown in ER
suppress_graph = False # default
message = ''
message = ""
if "url" in query_dict:
out_filename = query_dict["url"].split('/')[-1] + ".yaml"
out_filename = query_dict["url"].split("/")[-1] + ".yaml"
else:
out_filename = query_dict["id"] + ".yaml"
if isinstance(query_dict["pattern"], str):
@@ -44,15 +45,21 @@ for query_dict in queries_list['queries']:
# message: "Remote call on"
# AND
# message: "failed"
message = ' AND '.join('message:"' + pattern + '"' for pattern in query_dict["pattern"])
if 'tags' in query_dict:
message = " AND ".join(
'message:"' + pattern + '"' for pattern in query_dict["pattern"]
)
if "tags" in query_dict:
if isinstance(query_dict["tags"], str):
message += "AND " + "tags:" + query_dict["tags"] + '"'
elif isinstance(query_dict["tags"], list):
message += " AND (" + ' OR '.join('tags:"' + tags + '"' for tags in query_dict["tags"]) + ")"
if 'suppress-graph' in query_dict:
message += (
" AND ("
+ " OR ".join('tags:"' + tags + '"' for tags in query_dict["tags"])
+ ")"
)
if "suppress-graph" in query_dict:
suppress_graph = bool(query_dict["suppress-graph"])
er_query = {"query": message, "suppress-graph": suppress_graph}
with open(os.path.join(elastic_recheck_dest_dir, out_filename), 'w') as out_file:
with open(os.path.join(elastic_recheck_dest_dir, out_filename), "w") as out_file:
yaml.dump(er_query, out_file, default_flow_style=False, width=88)
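
To make the conversion concrete, here is a hypothetical input entry (every value below is invented, not taken from queries.yml) and what the loop above does with it:

# Hypothetical queries.yml entry, expressed as the dict the loop sees:
query_dict = {
    "id": "remote-call-failure",
    "url": "https://bugs.launchpad.net/tripleo/+bug/1234567",
    "pattern": ["Remote call on", "failed"],
    "tags": "console",
}
# The url takes precedence over the id, so the output file is
# output/elastic-recheck/1234567.yaml and its query value becomes:
#   message:"Remote call on" AND message:"failed" AND tags:"console"
# with suppress-graph defaulting to false.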

(validation models module)

@@ -1,10 +1,13 @@
"""Models used for validation."""
from pathlib import Path
from typing import List, Optional, Union
from pydantic import BaseModel, Field, Extra, HttpUrl
from pydantic import BaseModel, Extra, Field, HttpUrl
class Query(BaseModel):
"""Query Model."""
id: str
name: Optional[str]
pattern: Optional[Union[List[str], str]]
@@ -15,21 +18,26 @@ class Query(BaseModel):
# elastic-search specific fields
tags: Optional[Union[List[str], str]]
suppress_graph: Optional[bool] = Field(
alias='suppress-graph',
description="Used for elastic-recheck")
alias="suppress-graph", description="Used for elastic-recheck"
)
# artcl/sove specific fields
regex: Optional[Union[List[str], str]]
# https://opendev.org/openstack/ansible-role-collect-logs/src/branch/master/vars/sova-patterns.yml#L47
multiline: Optional[bool] = False
files: Optional[List[str]] = Field(
description="List of glob patterns, narrows down searching")
description="List of glob patterns, narrows down searching"
)
class Config:
"""Disalow unknown attributes."""
extra = Extra.forbid
class Queries(BaseModel):
"""Queries Model."""
queries: List[Query]
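
A minimal sketch of how these models can validate the source data (assuming pydantic v1, which the Extra import implies, and the file layout used by the scripts above):

import yaml

with open("src/data/queries.yml") as in_file:
    data = yaml.safe_load(in_file)
# Extra.forbid makes this raise pydantic.ValidationError on any
# unknown attribute, not only on missing or mistyped ones.
queries = Queries.parse_obj(data)
print(len(queries.queries), "queries validated")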

(Sova generator script)

@@ -1,76 +1,65 @@
import json
import os
import yaml
import re
"""Generates Sova compatible json file from human readable queries.
"""
This script generates Sova compatible json file from human readable queries.
It takes 'regex' or if 'regex' is not available it converts 'pattern' to regex, from input file and forms
sova compatible json file.
It takes 'regex' or if 'regex' is not available it converts 'pattern' to regex,
from input file and forms sova compatible json file.
input: src/data/queries.yml
output: output/sova-pattern-generated.json
"""
import json
import os
import re
import yaml
dir_path = os.path.dirname(os.path.realpath(__file__))
# Source and destination files
queries_src = os.path.join(dir_path, 'data', 'queries.yml')
sova_dest = os.path.join(os.path.dirname(dir_path), 'output', 'sova-pattern-generated.json')
queries_src = os.path.join(dir_path, "data", "queries.yml")
sova_dest = os.path.join(
os.path.dirname(dir_path), "output", "sova-pattern-generated.json"
)
with open(queries_src) as in_file:
queries_list = yaml.load(in_file, Loader=yaml.BaseLoader)
sova_regex_list = []
sova_patterns_list = {"console": []}
sova_dict = {
'regexes': sova_regex_list,
'patterns': sova_patterns_list
}
sova_dict = {"regexes": sova_regex_list, "patterns": sova_patterns_list}
for query_dict in queries_list['queries']:
for query_dict in queries_list["queries"]:
regex_dict = {}
regex_str = query_dict.get("regex", "")
if {'regex', 'pattern'} <= query_dict.keys():
if {"regex", "pattern"} <= query_dict.keys():
# No pattern/regex conversion
regex_dict = {
"name": query_dict["id"],
"regex": query_dict["regex"]
}
elif 'regex' in query_dict:
regex_dict = {
"name": query_dict["id"],
"regex": query_dict["regex"]
}
elif 'pattern' in query_dict:
regex_dict = {"name": query_dict["id"], "regex": query_dict["regex"]}
elif "regex" in query_dict:
regex_dict = {"name": query_dict["id"], "regex": query_dict["regex"]}
elif "pattern" in query_dict:
# Convert pattern to regex for Sova
if isinstance(query_dict["pattern"], str):
generated_regex = re.escape(query_dict["pattern"])
regex_dict = {
"name": query_dict["id"],
"regex": generated_regex
}
regex_dict = {"name": query_dict["id"], "regex": generated_regex}
regex_str = generated_regex
elif isinstance(query_dict["pattern"], list):
generated_regex = []
for item in query_dict["pattern"]:
generated_regex.append(re.escape(item))
regex_dict = {
"name": query_dict["id"],
"regex": generated_regex
}
regex_dict = {"name": query_dict["id"], "regex": generated_regex}
sova_regex_list.append(regex_dict)
patterns = sova_patterns_list.get("console", list())
# if regex_str:
patterns.append({
"id": query_dict['id'],
patterns.append(
{
"id": query_dict["id"],
"logstash": "",
"msg": query_dict['id'],
"pattern": regex_dict['name'],
"tag": "info"
})
"msg": query_dict["id"],
"pattern": regex_dict["name"],
"tag": "info",
}
)
with open(sova_dest, 'w') as out_file:
with open(sova_dest, "w") as out_file:
json.dump(sova_dict, out_file, indent=4, sort_keys=True)
# Adds newline to pass lint
out_file.write("\n")
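
For reference, this is the shape json.dump writes for a single string-pattern query (values carried over from the invented example above; on Python 3.7+ re.escape leaves plain words and spaces untouched):

{
    "patterns": {
        "console": [
            {
                "id": "remote-call-failure",
                "logstash": "",
                "msg": "remote-call-failure",
                "pattern": "remote-call-failure",
                "tag": "info"
            }
        ]
    },
    "regexes": [
        {
            "name": "remote-call-failure",
            "regex": "Remote call on"
        }
    ]
}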