Bump linters

- upgrade linters
- enable black formatting so we don't waste time making flake8 happy
- all the .py files touched by this patch were reformatted by black itself

Change-Id: I947cf8934a57ad519242757c777b23155fcbe7f4
Author: Sorin Sbarnea
Date:   2021-02-16 15:59:38 +00:00
Parent: 0608040b24
Commit: 51160038a5
15 changed files with 472 additions and 413 deletions
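For a sense of the change, here is a minimal, runnable sketch of the rewrite black applies throughout the hunks below (a distilled toy, not an excerpt from the patch):

import logging


def send_stats(host, port):
    # Toy stand-in for the module functions reformatted in this patch.
    return host, port


params = {"graphite_host": "localhost", "graphite_port": 2003}

# Before black: single quotes, hand-aligned continuation lines.
#     log = logging.getLogger('parser')
#     result = send_stats(params['graphite_host'],
#                         params['graphite_port'])
# After black: double quotes, one argument per line, trailing comma.
log = logging.getLogger("parser")
result = send_stats(
    params["graphite_host"],
    params["graphite_port"],
)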


@@ -2,8 +2,4 @@
parseable: true
skip_list:
# Add skips here only as last resort, like:
# https://github.com/ansible/ansible-lint/issues/557
- 302 # [E302] mkdir used in place of argument state=directory to file module
- 303 # [E303] ... used in place of ... module
- 208 # [E208]
- 106 # [E106]
- role-name


@@ -1,7 +1,16 @@
---
repos:
- repo: https://github.com/PyCQA/isort
rev: 5.7.0
hooks:
- id: isort
- repo: https://github.com/python/black.git
rev: 20.8b1
hooks:
- id: black
language_version: python3
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.4.0
rev: v3.4.0
hooks:
- id: end-of-file-fixer
- id: trailing-whitespace
@@ -10,18 +19,25 @@ repos:
- id: check-executables-have-shebangs
- id: check-merge-conflict
- id: debug-statements
- id: flake8
- id: check-yaml
files: .*\.(yaml|yml)$
- repo: https://gitlab.com/pycqa/flake8.git
rev: 3.8.4
hooks:
- id: flake8
additional_dependencies:
- flake8-absolute-import
- flake8-black>=0.1.1
language_version: python3
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.18.0
rev: v1.26.0
hooks:
- id: yamllint
files: \.(yaml|yml)$
types: [file, yaml]
entry: yamllint --strict -f parsable
- repo: https://github.com/ansible/ansible-lint.git
rev: 9da220ae3a11c10c10ee43284ad6cad6d8ba52b7
rev: v5.0.0
hooks:
- id: ansible-lint
always_run: true
@@ -31,7 +47,7 @@ repos:
verbose: true
entry: env ANSIBLE_LIBRARY=./library ansible-lint --force-color -v .
- repo: https://github.com/openstack-dev/bashate.git
rev: 0.6.0
rev: 2.0.0
hooks:
- id: bashate
entry: bashate --error . --verbose --ignore=E006,E040


@@ -28,7 +28,8 @@
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import (absolute_import, division, print_function)
from __future__ import absolute_import, division, print_function
import sphinx_rtd_theme
__metaclass__ = type
@@ -51,31 +52,31 @@ html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
master_doc = "index"
# General information about the project.
project = u'TripleO'
copyright = u'2016, RDO CI Team'
bug_tracker = u'Bugzilla'
bug_tracker_url = u'https://bugzilla.redhat.com'
project = u"TripleO"
copyright = u"2016, RDO CI Team"
bug_tracker = u"Bugzilla"
bug_tracker_url = u"https://bugzilla.redhat.com"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.0.0'
version = "3.0.0"
# The full version, including alpha/beta/rc tags.
release = '3.0.0'
release = "3.0.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -106,7 +107,7 @@ exclude_patterns = []
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -114,12 +115,12 @@ pygments_style = 'sphinx'
# -- Options for HTML output ---------------------------------------------------
html_static_path = ['_custom']
html_style = 'custom.css'
html_last_updated_fmt = '%b %d, %Y'
html_static_path = ["_custom"]
html_style = "custom.css"
html_last_updated_fmt = "%b %d, %Y"
# Output file base name for HTML help builder.
htmlhelp_basename = 'tripleo-documentor'
htmlhelp_basename = "tripleo-documentor"
html_show_sourcelink = True
html_show_sphinx = True
@@ -130,10 +131,8 @@ html_show_copyright = True
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
@@ -142,4 +141,8 @@ rst_prolog = """
.. |project| replace:: %s
.. |bug_tracker| replace:: %s
.. |bug_tracker_url| replace:: %s
""" % (project, bug_tracker, bug_tracker_url)
""" % (
project,
bug_tracker,
bug_tracker_url,
)


@@ -11,7 +11,7 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
from __future__ import absolute_import, division, print_function
import gzip
import logging
@@ -24,9 +24,12 @@ except ImportError:
__metaclass__ = type
logging.basicConfig(
format=('%(asctime)s - %(name)s - %(levelname)s - '
'%(module)s.%(funcName)s:%(lineno)d - %(message)s'))
log = logging.getLogger('parser')
format=(
"%(asctime)s - %(name)s - %(levelname)s - "
"%(module)s.%(funcName)s:%(lineno)d - %(message)s"
)
)
log = logging.getLogger("parser")
log.setLevel(logging.ERROR)
@@ -48,22 +51,23 @@ class Pattern(object):
def setup_regexes(self):
self.regexes = {}
if self.config:
for regexp in self.config.get('regexes', []):
for regexp in self.config.get("regexes", []):
flags = []
if regexp.get('multiline'):
if regexp.get("multiline"):
flags.append(regex_module.MULTILINE)
self.regexes[regexp.get('name')] = regex_module.compile(
r'{0}'.format(regexp.get('regex')), *flags)
self.regexes[regexp.get("name")] = regex_module.compile(
r"{0}".format(regexp.get("regex")), *flags
)
def setup_patterns(self):
self._patterns = self.config.get('patterns', {})
self._patterns = self.config.get("patterns", {})
if self._patterns:
for key in self._patterns:
for p in self._patterns[key]:
if p['pattern'] in self.regexes:
p['pattern'] = self.regexes[p['pattern']]
if p['logstash'] in self.regexes:
p['logstash'] = self.regexes[p['logstash']]
if p["pattern"] in self.regexes:
p["pattern"] = self.regexes[p["pattern"]]
if p["logstash"] in self.regexes:
p["logstash"] = self.regexes[p["logstash"]]
@property
def patterns(self):
@@ -94,12 +98,9 @@ def parse(text_file, patterns):
with open_func(text_file, "rt") as finput:
text = finput.read()
for p in patterns:
line_matched = line_match(
p["pattern"], text, exclude=p.get("exclude"))
line_matched = line_match(p["pattern"], text, exclude=p.get("exclude"))
if line_matched:
log.debug(
"Found pattern %s in file %s",
repr(p), text_file)
log.debug("Found pattern %s in file %s", repr(p), text_file)
ids.append(p["id"])
msgs.append(p["msg"].format(line_matched))
return list(set(ids)), list(set(msgs))


@@ -11,10 +11,10 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: ara_graphite
version_added: "1.0.0"
@@ -55,9 +55,9 @@ options:
required: False
default: True
type: bool
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Get ARA json data
shell: "{{ local_working_dir }}/bin/ara task list --all -f json"
register: ara_data
@@ -67,7 +67,7 @@ EXAMPLES = '''
ara_data: "{{ ara_task_output.stdout }}"
ara_mapping:
- "Name of task that deploys overcloud": overcloud.deploy.seconds
'''
"""
import ast # noqa: E402
import datetime # noqa: E402
@@ -75,27 +75,28 @@ import socket  # noqa: E402
def stamp(x):
'''Convert ISO timestamp to Unix timestamp
"""Convert ISO timestamp to Unix timestamp
:param x: string with timestamp
:return: string with Unix timestamp
'''
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").strftime('%s')
"""
return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").strftime("%s")
def task_length(x):
'''Calculate task length in seconds from "%H:%M:%S" format
"""Calculate task length in seconds from "%H:%M:%S" format
:param x: datetime string
:return: number of seconds spent for task
'''
"""
t = datetime.datetime.strptime(x, "%H:%M:%S")
return datetime.timedelta(hours=t.hour, minutes=t.minute,
seconds=t.second).total_seconds()
return datetime.timedelta(
hours=t.hour, minutes=t.minute, seconds=t.second
).total_seconds()
def translate(mapping, json_data, only_ok):
'''Create data to send to Graphite server in format:
"""Create data to send to Graphite server in format:
GraphitePath Timestamp TaskDuration
GraphitePath is taken from mapping dictionary according to task name.
@@ -103,26 +104,27 @@ def translate(mapping, json_data, only_ok):
:param mapping: dictionary of mapping task names to graphite paths
:param json_data: JSON data with tasks and times
:return: list of graphite data
'''
"""
items = []
data = ast.literal_eval(json_data)
for task in data:
if not only_ok or (only_ok and task['Status'] in ['changed', 'ok']):
if task['Name'] in mapping:
timestamp, duration = stamp(task['Time Start']), task_length(
task['Duration'])
items.append([mapping[task['Name']], duration, timestamp])
if not only_ok or (only_ok and task["Status"] in ["changed", "ok"]):
if task["Name"] in mapping:
timestamp, duration = stamp(task["Time Start"]), task_length(
task["Duration"]
)
items.append([mapping[task["Name"]], duration, timestamp])
return items
def send(data, gr_host, gr_port, prefix):
'''Actual sending of data to Graphite server via network
"""Actual sending of data to Graphite server via network
:param data: list of items to send to Graphite
:param gr_host: Graphite host (with optional port)
:param prefix: prefix to append before Graphite path
:return: True if sent successfully, otherwise False
'''
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(3.0)
try:
@@ -132,31 +134,31 @@ def send(data, gr_host, gr_port, prefix):
for content in data:
s.send(prefix + " ".join([str(i) for i in content]) + "\n")
s.close()
return True, ''
return True, ""
def send_stats(gr_host, gr_port, mapping, json_data, prefix, only_ok):
'''Send ARA statistics to Graphite server
"""Send ARA statistics to Graphite server
:param gr_host: Graphite host (with optional port)
:param mapping: dictionary of mapping task names to graphite paths
:param json_data: JSON data with tasks and times
:param prefix: prefix to append before Graphite path
:return: JSON ansible result
'''
"""
data2send = translate(mapping, json_data, only_ok)
response, reason = send(data2send, gr_host, gr_port, prefix)
if not response:
return {
'changed': False,
'failed': True,
'graphite_host': gr_host,
'msg': "Can't connect to Graphite: %s" % reason
"changed": False,
"failed": True,
"graphite_host": gr_host,
"msg": "Can't connect to Graphite: %s" % reason,
}
return {
'changed': True,
'graphite_host': gr_host,
'sent_data': data2send,
"changed": True,
"graphite_host": gr_host,
"sent_data": data2send,
}
@@ -165,21 +167,22 @@ def main():
module = AnsibleModule(
argument_spec=dict(
graphite_host=dict(required=True, type='str'),
graphite_port=dict(required=False, type='int', default=2003),
ara_mapping=dict(required=True, type='dict'),
ara_data=dict(required=True, type='str'),
graphite_prefix=dict(required=False, type='str', default=''),
only_successful_tasks=dict(required=False, type='bool',
default=True)
graphite_host=dict(required=True, type="str"),
graphite_port=dict(required=False, type="int", default=2003),
ara_mapping=dict(required=True, type="dict"),
ara_data=dict(required=True, type="str"),
graphite_prefix=dict(required=False, type="str", default=""),
only_successful_tasks=dict(required=False, type="bool", default=True),
)
)
result = send_stats(module.params['graphite_host'],
module.params['graphite_port'],
module.params['ara_mapping'],
module.params['ara_data'],
module.params['graphite_prefix'],
module.params['only_successful_tasks'])
result = send_stats(
module.params["graphite_host"],
module.params["graphite_port"],
module.params["ara_mapping"],
module.params["ara_data"],
module.params["graphite_prefix"],
module.params["only_successful_tasks"],
)
module.exit_json(**result)


@@ -11,10 +11,10 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
DOCUMENTATION = """
---
module: ara_influxdb
version_added: "1.0.0"
@@ -90,9 +90,9 @@ options:
by default 0.
required: False
type: int
'''
"""
EXAMPLES = '''
EXAMPLES = """
- name: Get ARA json data
shell: "{{ local_working_dir }}/bin/ara result list --all -f json"
register: ara_data
@@ -112,7 +112,7 @@ EXAMPLES = '''
standard_fields: false
longest_tasks: 15
when: ara_data.stdout != "[]"
'''
"""
import ast # noqa pylint: disable=C0413
import datetime # noqa pylint: disable=C0413
@@ -120,98 +120,98 @@ import json  # noqa pylint: disable=C0413
import os # noqa pylint: disable=C0413
import re # noqa pylint: disable=C0413
SCHEME = '{measure},{tags} {fields} {timestamp}'
SCHEME = "{measure},{tags} {fields} {timestamp}"
CUSTOM_MAP = {
'undercloud_install': ["undercloud-deploy : Install the undercloud"],
'prepare_images': [
"overcloud-prep-images : Prepare the overcloud images for deploy"],
'images_update': [
"undercloud_install": ["undercloud-deploy : Install the undercloud"],
"prepare_images": [
"overcloud-prep-images : Prepare the overcloud images for deploy"
],
"images_update": [
"modify-image : Convert image",
"modify-image : Run script on image",
"modify-image : Close qcow2 image"
"modify-image : Close qcow2 image",
],
'images_build': ["build-images : run the image build script (direct)"],
'containers_prepare': [
"overcloud-prep-containers : "
"Prepare for the containerized deployment"],
'overcloud_deploy': ["overcloud-deploy : Deploy the overcloud"],
'pingtest': ["validate-simple : Validate the overcloud"],
'tempest_run': ["validate-tempest : Execute tempest"],
'undercloud_reinstall': [
"validate-undercloud : Reinstall the undercloud to check idempotency"],
'overcloud_delete': [
"overcloud-delete : check for delete command to complete or fail"],
'overcloud_upgrade': ["overcloud-upgrade : Upgrade the overcloud",
"tripleo-upgrade : run docker upgrade converge step",
"tripleo-upgrade : run docker upgrade composable "
"step"],
'undercloud_upgrade': ["tripleo-upgrade : upgrade undercloud"],
"images_build": ["build-images : run the image build script (direct)"],
"containers_prepare": [
"overcloud-prep-containers : Prepare for the containerized deployment"
],
"overcloud_deploy": ["overcloud-deploy : Deploy the overcloud"],
"pingtest": ["validate-simple : Validate the overcloud"],
"tempest_run": ["validate-tempest : Execute tempest"],
"undercloud_reinstall": [
"validate-undercloud : Reinstall the undercloud to check idempotency"
],
"overcloud_delete": [
"overcloud-delete : check for delete command to complete or fail"
],
"overcloud_upgrade": [
"overcloud-upgrade : Upgrade the overcloud",
"tripleo-upgrade : run docker upgrade converge step",
"tripleo-upgrade : run docker upgrade composable step",
],
"undercloud_upgrade": ["tripleo-upgrade : upgrade undercloud"],
}
class InfluxStandardTags(object):
'''InfluxStandardTags contains:
"""InfluxStandardTags contains:
calculation of standard job describing parameters as:
* release
* nodepool provider cloud
* zuul pipeline name
* toci_jobtype
and rendering them in tags template
calculation of standard job describing parameters as:
* release
* nodepool provider cloud
* zuul pipeline name
* toci_jobtype
and rendering them in tags template
'''
"""
def branch(self):
return os.environ.get('STABLE_RELEASE') or 'master'
return os.environ.get("STABLE_RELEASE") or "master"
def cloud(self):
return os.environ.get('NODEPOOL_PROVIDER', 'null')
return os.environ.get("NODEPOOL_PROVIDER", "null")
def pipeline(self):
if os.environ.get('ZUUL_PIPELINE'):
if 'check' in os.environ['ZUUL_PIPELINE']:
return 'check'
elif 'gate' in os.environ['ZUUL_PIPELINE']:
return 'gate'
elif 'periodic' in os.environ['ZUUL_PIPELINE']:
return 'periodic'
return 'null'
if os.environ.get("ZUUL_PIPELINE"):
if "check" in os.environ["ZUUL_PIPELINE"]:
return "check"
elif "gate" in os.environ["ZUUL_PIPELINE"]:
return "gate"
elif "periodic" in os.environ["ZUUL_PIPELINE"]:
return "periodic"
return "null"
def toci_jobtype(self):
return os.environ.get('TOCI_JOBTYPE', 'null')
return os.environ.get("TOCI_JOBTYPE", "null")
def render(self):
return ('branch=%s,'
'cloud=%s,'
'pipeline=%s,'
'toci_jobtype=%s') % (
self.branch(),
self.cloud(),
self.pipeline(),
self.toci_jobtype(),
return ("branch=%s," "cloud=%s," "pipeline=%s," "toci_jobtype=%s") % (
self.branch(),
self.cloud(),
self.pipeline(),
self.toci_jobtype(),
)
class InfluxStandardFields(object):
'''InfluxStandardFields contains:
"""InfluxStandardFields contains:
calculation of time of job steps as:
* whole job duration
* testing environment preparement
* quickstart files and environment preparement
* zuul host preparement
and rendering them in template
calculation of time of job steps as:
* whole job duration
* testing environment preparement
* quickstart files and environment preparement
* zuul host preparement
and rendering them in template
'''
"""
def job_duration(self):
if os.environ.get('START_JOB_TIME'):
return int(
datetime.datetime.utcnow().strftime("%s")) - int(
os.environ.get('START_JOB_TIME'))
if os.environ.get("START_JOB_TIME"):
return int(datetime.datetime.utcnow().strftime("%s")) - int(
os.environ.get("START_JOB_TIME")
)
return 0
def logs_size(self):
@@ -222,51 +222,55 @@ class InfluxStandardFields(object):
return datetime.datetime.utcnow().strftime("%s")
def testenv_prepare(self):
return os.environ.get('STATS_TESTENV', 0)
return os.environ.get("STATS_TESTENV", 0)
def quickstart_prepare(self):
return os.environ.get('STATS_OOOQ', 0)
return os.environ.get("STATS_OOOQ", 0)
def zuul_host_prepare(self):
if (os.environ.get('DEVSTACK_GATE_TIMEOUT') and # noqa: W504
os.environ.get('REMAINING_TIME')):
return (int(
os.environ['DEVSTACK_GATE_TIMEOUT']) - int(
os.environ['REMAINING_TIME'])) * 60
if os.environ.get("DEVSTACK_GATE_TIMEOUT") and os.environ.get( # noqa: W504
"REMAINING_TIME"
):
return (
int(os.environ["DEVSTACK_GATE_TIMEOUT"])
- int(os.environ["REMAINING_TIME"])
) * 60
return 0
def render(self):
return ('job_duration=%d,'
'logs_size=%d,'
'testenv_prepare=%s,'
'quickstart_prepare=%s,'
'zuul_host_prepare=%d,'
) % (
self.job_duration(),
self.logs_size(),
self.testenv_prepare(),
self.quickstart_prepare(),
self.zuul_host_prepare()
return (
"job_duration=%d,"
"logs_size=%d,"
"testenv_prepare=%s,"
"quickstart_prepare=%s,"
"zuul_host_prepare=%d,"
) % (
self.job_duration(),
self.logs_size(),
self.testenv_prepare(),
self.quickstart_prepare(),
self.zuul_host_prepare(),
)
class InfluxConfiguredFields(object):
'''InfluxConfiguredFields contains calculation:
"""InfluxConfiguredFields contains calculation:
* whole job duration
* testing environment preparement
* quickstart files and environment preparement
* zuul host preparement
and rendering them in template
"""
* whole job duration
* testing environment preparement
* quickstart files and environment preparement
* zuul host preparement
and rendering them in template
'''
def __init__(self, match_map, json_data, only_ok=True):
"""Set up data for configured field
:param match_map {dict} -- Map of tasks from ansible playbook to
names of data fields in influxDB.
:param json_data: {dict} -- JSON data generated by ARA
:param only_ok=True: {bool} -- to count only passed tasks
:param match_map {dict} -- Map of tasks from ansible playbook to
names of data fields in influxDB.
:param json_data: {dict} -- JSON data generated by ARA
:param only_ok=True: {bool} -- to count only passed tasks
"""
self.map = match_map
self.only_ok = only_ok
@@ -281,26 +285,26 @@
def render(self):
tasks = self.task_maps()
result = ''
result = ""
for task, timest in tasks.items():
result += "%s=%d," % (task, timest)
return result
class InfluxLongestFields(object):
'''InfluxLongestFields runs calculation of:
"""InfluxLongestFields runs calculation of:
tasks that took the longest time.
The tasks could be from undercloud or overcloud playbooks.
tasks that took the longest time.
The tasks could be from undercloud or overcloud playbooks.
'''
"""
def __init__(self, json_data, only_ok=True, top=15):
"""Constructor for InfluxLongestFields
:param json_data: {dict} -- JSON data generated by ARA
:param only_ok=True: {bool} -- to count only passed tasks
:param top=15: {int} -- how many tasks to send to DB
:param json_data: {dict} -- JSON data generated by ARA
:param only_ok=True: {bool} -- to count only passed tasks
:param top=15: {int} -- how many tasks to send to DB
"""
self.top = top
self.only_ok = only_ok
@@ -309,40 +313,37 @@ class InfluxLongestFields(object):
def collect_tasks(self):
tasks_dict = tasks_times_dict(self.data, self.only_ok)
return sorted(
[[k, v] for k, v in tasks_dict.items()],
key=lambda x: x[1],
reverse=True
)[:self.top]
[[k, v] for k, v in tasks_dict.items()], key=lambda x: x[1], reverse=True
)[: self.top]
def translate_names(self, names):
for i in names:
i[0] = re.sub(
r'[^0-9A-z\-_]+',
'',
i[0].replace(":", "__").replace(" ", "_"))
r"[^0-9A-z\-_]+", "", i[0].replace(":", "__").replace(" ", "_")
)
i[1] = int(i[1])
return names
def render(self):
result = ''
result = ""
for i in self.translate_names(self.collect_tasks()):
result += "{0}={1},".format(*i)
return result
class SovaFields(object):
'''SovaFields provides Sova calculated failure reasons.'''
"""SovaFields provides Sova calculated failure reasons."""
def __init__(self, sova_file):
"""Constructor for SovaFields
:param sova_file: {str} -- path to 'failures_file' of Sova
:param sova_file: {str} -- path to 'failures_file' of Sova
"""
self.sova_file = sova_file
def parse_sova_file(self):
if not os.path.exists(self.sova_file):
return ''
return ""
with open(self.sova_file) as f:
text = f.readlines()
reason = text[0]
@@ -353,19 +354,19 @@ class SovaFields(object):
scheme = 'sova_reason="%s",sova_tag="%s",'
res = self.parse_sova_file()
if not res:
return scheme % ('', '')
return scheme % ("", "")
return scheme % (res[0], res[1])
def tasks_times_dict(tasks, only_ok=True):
times_dict = {}
for task in tasks:
if not only_ok or task['Status'] in ['changed', 'ok']:
name = task['Name']
if not only_ok or task["Status"] in ["changed", "ok"]:
name = task["Name"]
if name in times_dict:
times_dict[name].append(task['Duration'])
times_dict[name].append(task["Duration"])
else:
times_dict[name] = [task['Duration']]
times_dict[name] = [task["Duration"]]
# because of some tasks are executed multiple times we need to count
# all of them and make summary of all durations
for i in times_dict:
@@ -374,26 +375,31 @@ def tasks_times_dict(tasks, only_ok=True):
def task_length(x):
'''Calculate task length in seconds from "%H:%M:%S" format
"""Calculate task length in seconds from "%H:%M:%S" format
Arguments:
x {string} -- a timestamp
Returns:
int -- total seconds for the task
'''
"""
t = datetime.datetime.strptime(x, "%H:%M:%S")
return datetime.timedelta(hours=t.hour, minutes=t.minute,
seconds=t.second).total_seconds()
return datetime.timedelta(
hours=t.hour, minutes=t.minute, seconds=t.second
).total_seconds()
def translate(measure, json_data, only_ok,
mapped_fields=True,
standard_fields=True,
longest_tasks=0,
data_file=None):
'''Create data to send to InfluxDB server in format SCHEME
def translate(
measure,
json_data,
only_ok,
mapped_fields=True,
standard_fields=True,
longest_tasks=0,
data_file=None,
):
"""Create data to send to InfluxDB server in format SCHEME
Fields keys are taken from ARA data according to task names.
@@ -401,17 +407,18 @@ def translate(measure, json_data, only_ok,
:param json_data: JSON data with tasks and times
:param: only_ok: boolean, where to count only successful tasks
:return: full InfluxDB scheme
'''
"""
data = ast.literal_eval(json_data)
data = json.loads(data)
tags = InfluxStandardTags()
std_fields = InfluxStandardFields()
map_fields = InfluxConfiguredFields(
match_map=CUSTOM_MAP, json_data=data, only_ok=only_ok)
longest_fields = InfluxLongestFields(json_data=data,
top=longest_tasks,
only_ok=only_ok)
fields = ''
match_map=CUSTOM_MAP, json_data=data, only_ok=only_ok
)
longest_fields = InfluxLongestFields(
json_data=data, top=longest_tasks, only_ok=only_ok
)
fields = ""
if standard_fields:
fields += std_fields.render()
if mapped_fields:
@@ -419,33 +426,34 @@
if longest_tasks:
fields += longest_fields.render()
if data_file:
sova_fields = SovaFields(os.path.join(
os.path.dirname(data_file), 'failures_file'))
sova_fields = SovaFields(
os.path.join(os.path.dirname(data_file), "failures_file")
)
fields += sova_fields.render()
fields = fields.rstrip(",")
result = SCHEME.format(
measure=measure,
tags=tags.render(),
fields=fields,
timestamp=std_fields.timestamp()
timestamp=std_fields.timestamp(),
)
return result
def create_file_with_data(data, path):
'''Create a file with InfluxDB data to send
"""Create a file with InfluxDB data to send
:param data: data to write
:param path: path of the file
:return:
'''
"""
with open(path, "a") as f:
f.write(data + "\n")
def send(file_path, in_url, in_port, in_user, in_pass, in_db):
'''Actual sending of data to InfluxDB server via network
"""Actual sending of data to InfluxDB server via network
:param file_path: path to file with data to send
:param in_url: InfluxDB URL
@@ -454,7 +462,7 @@ def send(file_path, in_url, in_port, in_user, in_pass, in_db):
:param in_pass: InfluxDB password
:param in_db: InfluxDB database name
:return: True if sent successfully, otherwise False
'''
"""
import requests # noqa pylint: disable=C0413
from requests.auth import HTTPBasicAuth # noqa pylint: disable=C0413
@@ -465,26 +473,36 @@
params = {"db": in_db, "precision": "s"}
if in_user:
if not in_pass:
if os.environ.get('INFLUXDB_PASSWORD'):
with open(os.environ['INFLUXDB_PASSWORD']) as f:
if os.environ.get("INFLUXDB_PASSWORD"):
with open(os.environ["INFLUXDB_PASSWORD"]) as f:
in_pass = f.read().strip()
else:
return False, 'InfluxDB password was not provided!'
return False, "InfluxDB password was not provided!"
auth = HTTPBasicAuth(in_user, in_pass)
else:
auth = None
with open(file_path, "rb") as payload:
req = requests.post(url, params=params, data=payload, auth=auth,
verify=False)
req = requests.post(url, params=params, data=payload, auth=auth, verify=False)
if not req or req.status_code != 204:
return False, "HTTP: %s\nResponse: %s" % (req.status_code, req.content)
return True, ''
return True, ""
def send_stats(in_url, in_port, in_user, in_pass, in_db, json_data,
measure, data_file, only_ok, mapped_fields=True,
standard_fields=True, longest_tasks=0):
'''Send ARA statistics to InfluxDB server
def send_stats(
in_url,
in_port,
in_user,
in_pass,
in_db,
json_data,
measure,
data_file,
only_ok,
mapped_fields=True,
standard_fields=True,
longest_tasks=0,
):
"""Send ARA statistics to InfluxDB server
:param in_url: InfluxDB URL
:param in_port: InfluxDB port
@@ -499,64 +517,72 @@ def send_stats(in_url, in_port, in_user, in_pass, in_db, json_data,
:param: standard_fields: if to send standard fields of each job, i.e. times
:param: longest_tasks: if to print only longest tasks and how many
:return: JSON ansible result
'''
data2send = translate(measure, json_data, only_ok, mapped_fields,
standard_fields, longest_tasks, data_file)
"""
data2send = translate(
measure,
json_data,
only_ok,
mapped_fields,
standard_fields,
longest_tasks,
data_file,
)
create_file_with_data(data2send, data_file)
if in_url:
response, reason = send(data_file, in_url, in_port, in_user, in_pass,
in_db)
response, reason = send(data_file, in_url, in_port, in_user, in_pass, in_db)
if not response:
return {
'changed': False,
'failed': True,
'influxdb_url': in_url,
'msg': reason
"changed": False,
"failed": True,
"influxdb_url": in_url,
"msg": reason,
}
return {
'changed': True,
'influxdb_url': in_url,
'sent_data': data2send,
"changed": True,
"influxdb_url": in_url,
"sent_data": data2send,
}
else:
return {
'changed': True,
'data_file': data_file,
'sent_data': data2send,
"changed": True,
"data_file": data_file,
"sent_data": data2send,
}
def main():
module = AnsibleModule( # noqa
argument_spec=dict(
influxdb_url=dict(required=True, type='str'),
influxdb_port=dict(required=True, type='int'),
influxdb_user=dict(required=False, type='str', default=None),
influxdb_password=dict(required=False, type='str',
default=None, no_log=True),
influxdb_db=dict(required=True, type='str'),
ara_data=dict(required=True, type='str'),
measurement=dict(required=True, type='str'),
data_file=dict(required=True, type='str'),
only_successful_tasks=dict(required=True, type='bool'),
mapped_fields=dict(default=True, type='bool'),
standard_fields=dict(default=True, type='bool'),
longest_tasks=dict(default=0, type='int'),
influxdb_url=dict(required=True, type="str"),
influxdb_port=dict(required=True, type="int"),
influxdb_user=dict(required=False, type="str", default=None),
influxdb_password=dict(
required=False, type="str", default=None, no_log=True
),
influxdb_db=dict(required=True, type="str"),
ara_data=dict(required=True, type="str"),
measurement=dict(required=True, type="str"),
data_file=dict(required=True, type="str"),
only_successful_tasks=dict(required=True, type="bool"),
mapped_fields=dict(default=True, type="bool"),
standard_fields=dict(default=True, type="bool"),
longest_tasks=dict(default=0, type="int"),
)
)
result = send_stats(module.params['influxdb_url'],
module.params['influxdb_port'],
module.params['influxdb_user'],
module.params['influxdb_password'],
module.params['influxdb_db'],
module.params['ara_data'],
module.params['measurement'],
module.params['data_file'],
module.params['only_successful_tasks'],
module.params['mapped_fields'],
module.params['standard_fields'],
module.params['longest_tasks'],
)
result = send_stats(
module.params["influxdb_url"],
module.params["influxdb_port"],
module.params["influxdb_user"],
module.params["influxdb_password"],
module.params["influxdb_db"],
module.params["ara_data"],
module.params["measurement"],
module.params["data_file"],
module.params["only_successful_tasks"],
module.params["mapped_fields"],
module.params["standard_fields"],
module.params["longest_tasks"],
)
module.exit_json(**result)


@@ -13,13 +13,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '0.1',
'status': ['preview'],
'supported_by': 'community'
"metadata_version": "0.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
@@ -60,21 +61,15 @@ from ansible.module_utils.basic import AnsibleModule  # noqa: E402
def main():
result = {'data': [], 'changed': False}
module = AnsibleModule(
argument_spec={
'data': {
'type': 'dict',
'required': True
}
})
result = {"data": [], "changed": False}
module = AnsibleModule(argument_spec={"data": {"type": "dict", "required": True}})
try:
for group, commands in module.params['data'].items():
for group, commands in module.params["data"].items():
for cmd_name, cmd_dict in commands.items():
cmd_dict['name'] = cmd_name
cmd_dict['group'] = group
result['data'].append(cmd_dict)
cmd_dict["name"] = cmd_name
cmd_dict["group"] = group
result["data"].append(cmd_dict)
except Exception as e:
module.fail_json(msg=str(e))
@@ -82,5 +77,5 @@ def main():
module.exit_json(**result)
if __name__ == '__main__':
if __name__ == "__main__":
main()


@@ -14,13 +14,14 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '0.1',
'status': ['preview'],
'supported_by': 'community'
"metadata_version": "0.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
@@ -109,11 +110,19 @@ file_written:
import os # noqa: E402
from copy import deepcopy # noqa: E402
from ansible.module_utils.basic import AnsibleModule # noqa: E402
def format_msg_filename(text):
for s in (" ", ":", ".", "/", ",", "'", ):
for s in (
" ",
":",
".",
"/",
",",
"'",
):
text = text.replace(s, "_")
return "_" + text.rstrip("_") + ".log"
@@ -122,77 +131,82 @@ def main():
module = AnsibleModule(
argument_spec=dict(
config=dict(type='dict', default={}),
files=dict(type='dict', required=True),
result=dict(type='path'),
result_file_dir=dict(type='path'),
))
if not module.params['files']:
config=dict(type="dict", default={}),
files=dict(type="dict", required=True),
result=dict(type="path"),
result_file_dir=dict(type="path"),
)
)
if not module.params["files"]:
module.fail_json(msg="Files for logs parsing have to be provided!")
existing_files = []
for pattern_file in module.params['files']:
file_ = module.params['files'][pattern_file]
for pattern_file in module.params["files"]:
file_ = module.params["files"][pattern_file]
if os.path.exists(file_):
existing_files.append(file_)
if not existing_files:
results = {"processed_files": [], 'changed': False}
results = {"processed_files": [], "changed": False}
module.exit_json(**results)
dict_patterns = deepcopy(module.params['config'])
dict_patterns = deepcopy(module.params["config"])
# from sova_lib import Pattern, parse
from ansible.module_utils.sova_lib import Pattern, parse
pattern = Pattern(dict_patterns)
PATTERNS = pattern.patterns
for name in module.params['files']:
for name in module.params["files"]:
if name not in PATTERNS:
module.fail_json(msg="File name %s wasn't found in [%s]" % (
name, ", ".join(list(PATTERNS.keys()))))
module.fail_json(
msg="File name %s wasn't found in [%s]"
% (name, ", ".join(list(PATTERNS.keys())))
)
messages, tags = [], []
for name, file_ in module.params['files'].items():
if module.params['files'][name] not in existing_files:
for name, file_ in module.params["files"].items():
if module.params["files"][name] not in existing_files:
continue
ids, msgs = parse(file_, PATTERNS[name])
found = [i for i in PATTERNS[name] if i['id'] in ids]
msg_tags = [i['tag'] for i in found if i.get('tag')]
found = [i for i in PATTERNS[name] if i["id"] in ids]
msg_tags = [i["tag"] for i in found if i.get("tag")]
messages += msgs
tags += msg_tags
messages = list(set(messages))
tags = list(set(tags))
if 'infra' in tags:
reason = 'infra'
elif 'code' in tags:
reason = 'code'
if "infra" in tags:
reason = "infra"
elif "code" in tags:
reason = "code"
else:
reason = 'unknown'
reason = "unknown"
text = " ".join(messages) or "No failure reason found"
file_name = format_msg_filename(text)
result = {'changed': True, "processed_files": existing_files}
result.update({'message': text})
result.update({'tags': tags})
if module.params['result'] and messages:
result = {"changed": True, "processed_files": existing_files}
result.update({"message": text})
result.update({"tags": tags})
if module.params["result"] and messages:
try:
with open(module.params['result'], "w") as f:
with open(module.params["result"], "w") as f:
f.write(text + "\n")
f.write("Reason: " + reason + "\n")
result.update({'file_written': module.params['result']})
result.update({"file_written": module.params["result"]})
except Exception as e:
module.fail_json(
msg="Can't write result to file %s: %s" % (
module.params['result'], str(e)))
if module.params['result_file_dir']:
log_file = os.path.join(module.params['result_file_dir'], file_name)
msg="Can't write result to file %s: %s"
% (module.params["result"], str(e))
)
if module.params["result_file_dir"]:
log_file = os.path.join(module.params["result_file_dir"], file_name)
try:
with open(log_file, "w") as f:
f.write(text + "\n")
f.write("Reason: " + reason + "\n")
result.update({'file_name_written': log_file})
result.update({"file_name_written": log_file})
except Exception as e:
module.fail_json(
msg="Can't write result to file %s: %s" % (log_file, str(e)))
msg="Can't write result to file %s: %s" % (log_file, str(e))
)
module.exit_json(**result)
if __name__ == '__main__':
if __name__ == "__main__":
main()

pyproject.toml (new file)

@@ -0,0 +1,2 @@
[tool.isort]
profile = "black"
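The single option above makes isort emit the same parenthesized, trailing-comma import style that black produces, so the two tools stop rewrapping each other's output. A runnable sketch of the effect, using stdlib names for illustration (isort only wraps once a line would exceed the 88-column limit):

# Before: one long import line that black would have to re-wrap:
#     from os.path import abspath, basename, dirname, exists, join, normpath, realpath
# After isort with profile = "black": vertical hanging indent with a trailing
# comma, the same shape as the import block in the test-file hunk below.
from os.path import (
    abspath,
    basename,
    dirname,
    exists,
    join,
    normpath,
    realpath,
)

print(exists(normpath(join(dirname(abspath(realpath(__file__))), basename(__file__)))))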


@@ -16,49 +16,47 @@
# Usage: openstack stack event list -f json overcloud | \
# heat-deploy-times.py [list of resource names]
# If no resource names are provided, all of the resources will be output.
from __future__ import (absolute_import, division, print_function)
from __future__ import absolute_import, division, print_function
import json
import sys
import time
__metaclass__ = type
def process_events(all_events, events):
times = {}
for event in all_events:
name = event['resource_name']
status = event['resource_status']
name = event["resource_name"]
status = event["resource_status"]
# Older clients return timestamps in the first format, newer ones
# append a Z. This way we can handle both formats.
try:
strptime = time.strptime(event['event_time'],
'%Y-%m-%dT%H:%M:%S')
strptime = time.strptime(event["event_time"], "%Y-%m-%dT%H:%M:%S")
except ValueError:
strptime = time.strptime(event['event_time'],
'%Y-%m-%dT%H:%M:%SZ')
strptime = time.strptime(event["event_time"], "%Y-%m-%dT%H:%M:%SZ")
etime = time.mktime(strptime)
if name in events:
if status == 'CREATE_IN_PROGRESS':
times[name] = {'start': etime, 'elapsed': None}
elif status == 'CREATE_COMPLETE':
times[name]['elapsed'] = etime - times[name]['start']
for name, data in sorted(times.items(),
key=lambda x: x[1]['elapsed'],
reverse=True):
elapsed = 'Still in progress'
if times[name]['elapsed'] is not None:
elapsed = times[name]['elapsed']
print('%s %s') % (name, elapsed)
if status == "CREATE_IN_PROGRESS":
times[name] = {"start": etime, "elapsed": None}
elif status == "CREATE_COMPLETE":
times[name]["elapsed"] = etime - times[name]["start"]
for name, data in sorted(
times.items(), key=lambda x: x[1]["elapsed"], reverse=True
):
elapsed = "Still in progress"
if times[name]["elapsed"] is not None:
elapsed = times[name]["elapsed"]
print("%s %s") % (name, elapsed)
if __name__ == '__main__':
if __name__ == "__main__":
stdin = sys.stdin.read()
all_events = json.loads(stdin)
events = sys.argv[1:]
if not events:
events = set()
for event in all_events:
events.add(event['resource_name'])
events.add(event["resource_name"])
process_events(all_events, events)


@@ -11,9 +11,11 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
from __future__ import absolute_import, division, print_function
import gzip
import logging
import yaml
try:
@@ -24,9 +26,12 @@ except ImportError:
__metaclass__ = type
logging.basicConfig(
format=('%(asctime)s - %(name)s - %(levelname)s - '
'%(module)s.%(funcName)s:%(lineno)d - %(message)s'))
log = logging.getLogger('parser')
format=(
"%(asctime)s - %(name)s - %(levelname)s - "
"%(module)s.%(funcName)s:%(lineno)d - %(message)s"
)
)
log = logging.getLogger("parser")
log.setLevel(logging.ERROR)
@@ -46,22 +51,23 @@ class Pattern(object):
def setup_regexes(self):
self.regexes = {}
if self.config:
for regexp in self.config.get('regexes', []):
for regexp in self.config.get("regexes", []):
flags = []
if regexp.get('multiline'):
if regexp.get("multiline"):
flags.append(regex_module.MULTILINE)
self.regexes[regexp.get('name')] = regex_module.compile(
r'{0}'.format(regexp.get('regex')), *flags)
self.regexes[regexp.get("name")] = regex_module.compile(
r"{0}".format(regexp.get("regex")), *flags
)
def setup_patterns(self):
self._patterns = self.config.get('patterns', {})
self._patterns = self.config.get("patterns", {})
if self._patterns:
for key in self._patterns:
for p in self._patterns[key]:
if p['pattern'] in self.regexes:
p['pattern'] = self.regexes[p['pattern']]
if p['logstash'] in self.regexes:
p['logstash'] = self.regexes[p['logstash']]
if p["pattern"] in self.regexes:
p["pattern"] = self.regexes[p["pattern"]]
if p["logstash"] in self.regexes:
p["logstash"] = self.regexes[p["logstash"]]
@property
def patterns(self):
@@ -92,12 +98,9 @@ def parse(text_file, patterns):
with open_func(text_file, "rt") as finput:
text = finput.read()
for p in patterns:
line_matched = line_match(
p["pattern"], text, exclude=p.get("exclude"))
line_matched = line_match(p["pattern"], text, exclude=p.get("exclude"))
if line_matched:
log.debug(
"Found pattern %s in file %s",
repr(p), text_file)
log.debug("Found pattern %s in file %s", repr(p), text_file)
ids.append(p["id"])
msgs.append(p["msg"].format(line_matched))
return list(set(ids)), list(set(msgs))


@@ -34,8 +34,9 @@ skip_authors = True
skip_changelog = True
[flake8]
# E123, E125 skipped as they are invalid PEP-8.
# E265 deals with spaces inside of comments
# black compatible settings
# https://black.readthedocs.io/en/stable/the_black_code_style.html
max-line-length = 88
extend-ignore = E203,E501,W503
show-source = True
ignore = E123,E125,E265
builtins = _
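The extend-ignore line is what keeps flake8 quiet about black's output: E203 and W503 are the two pycodestyle checks black's style trips by design, and E501 defers line length to black's own 88-column wrapping. A runnable sketch of both conflicts, with toy values (black would keep lines this short unwrapped; the shapes are what matter):

xs = list(range(10))
top = 3

# E203 (whitespace before ':'): black puts spaces around a slice colon when a
# bound is an expression, as in the "[: self.top]" slice reformatted above.
head = xs[top - 1 : top + 1]

# W503 (line break before binary operator): black starts continuation lines
# with the operator, as in the zuul_host_prepare() rewrite above.
total = (
    sum(xs)
    - len(xs)
) * 60

print(head, total)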


@@ -12,12 +12,11 @@
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import (absolute_import, division, print_function)
from __future__ import absolute_import, division, print_function
import setuptools
__metaclass__ = type
setuptools.setup(
setup_requires=['pbr'],
pbr=True)
setuptools.setup(setup_requires=["pbr"], pbr=True)


@@ -1,21 +1,21 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import json
from unittest.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
from unittest.mock import patch
def set_module_args(**args):
if '_ansible_remote_tmp' not in args:
args['_ansible_remote_tmp'] = '/tmp'
if '_ansible_keep_remote_files' not in args:
args['_ansible_keep_remote_files'] = False
if "_ansible_remote_tmp" not in args:
args["_ansible_remote_tmp"] = "/tmp"
if "_ansible_keep_remote_files" not in args:
args["_ansible_keep_remote_files"] = False
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
args = json.dumps({"ANSIBLE_MODULE_ARGS": args})
basic._ANSIBLE_ARGS = to_bytes(args)
@@ -28,20 +28,22 @@ class AnsibleFailJson(Exception):
def exit_json(*args, **kwargs):
if 'changed' not in kwargs:
kwargs['changed'] = False
if "changed" not in kwargs:
kwargs["changed"] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs):
kwargs['failed'] = True
kwargs["failed"] = True
raise AnsibleFailJson(kwargs)
class ModuleTestCase:
def setup_method(self):
self.mock_module = patch.multiple(
basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json,
basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json,
)
self.mock_module.start()
@@ -50,4 +52,4 @@ class ModuleTestCase:
def generate_name(test_case):
return test_case['name']
return test_case["name"]


@@ -1,12 +1,16 @@
from __future__ import (absolute_import, division, print_function)
import pytest # noqa
from __future__ import absolute_import, division, print_function
import os
import sys
from common.utils import (
AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args,
)
import yaml
import pytest # noqa
import yaml
from common.utils import (
AnsibleExitJson,
AnsibleFailJson,
ModuleTestCase,
set_module_args,
)
__metaclass__ = type
SAMPLE_INPUT_1 = """
@@ -33,15 +37,14 @@ import flatten_nested_dict  # noqa: E402
class TestFlattenNestedDict(ModuleTestCase):
def test_invalid_args(self):
set_module_args(
data="invalid",
)
with pytest.raises(AnsibleFailJson) as context:
flatten_nested_dict.main()
assert context.value.args[0]['failed'] is True
assert 'msg' in context.value.args[0]
assert context.value.args[0]["failed"] is True
assert "msg" in context.value.args[0]
def test_empty(self):
set_module_args(
@@ -49,14 +52,11 @@ class TestFlattenNestedDict(ModuleTestCase):
)
with pytest.raises(AnsibleExitJson) as context:
flatten_nested_dict.main()
assert context.value.args[0] == {'data': [], 'changed': False}
assert context.value.args[0] == {"data": [], "changed": False}
def test_one(self):
set_module_args(
data=yaml.safe_load(SAMPLE_INPUT_1)['data']
)
set_module_args(data=yaml.safe_load(SAMPLE_INPUT_1)["data"])
with pytest.raises(AnsibleExitJson) as context:
flatten_nested_dict.main()
assert context.value.args[0]['changed'] is False
assert context.value.args[0]['data'] == \
yaml.safe_load(SAMPLE_OUTPUT_1)['data']
assert context.value.args[0]["changed"] is False
assert context.value.args[0]["data"] == yaml.safe_load(SAMPLE_OUTPUT_1)["data"]