Fix ansible-test sanity

Change-Id: I9ce4d3dbd8a9ca1c3d0610f3a07f063632c67bfc
Sorin Sbarnea 2021-02-02 12:32:50 +00:00
parent 3f0881dfff
commit 0608040b24
16 changed files with 191 additions and 125 deletions

View File

@@ -28,9 +28,10 @@
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import (absolute_import, division, print_function)
import sphinx_rtd_theme
__metaclass__ = type
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
@@ -135,7 +136,7 @@ latex_elements = {
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
}
rst_prolog = """
.. |project| replace:: %s

View File

@@ -1,4 +1,3 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,10 +11,10 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
import gzip
import logging
import yaml
try:
import regex as regex_module
@@ -23,6 +22,7 @@ except ImportError:
import re as regex_module
__metaclass__ = type
logging.basicConfig(
format=('%(asctime)s - %(name)s - %(levelname)s - '
'%(module)s.%(funcName)s:%(lineno)d - %(message)s'))
@@ -38,6 +38,8 @@ class Pattern(object):
self.setup_patterns()
def load_yaml(self):
import yaml
if isinstance(self.data, dict):
self.config = self.data
else:
@@ -51,7 +53,7 @@ class Pattern(object):
if regexp.get('multiline'):
flags.append(regex_module.MULTILINE)
self.regexes[regexp.get('name')] = regex_module.compile(
r'{}'.format(regexp.get('regex')), *flags)
r'{0}'.format(regexp.get('regex')), *flags)
def setup_patterns(self):
self._patterns = self.config.get('patterns', {})
@@ -95,8 +97,9 @@ def parse(text_file, patterns):
line_matched = line_match(
p["pattern"], text, exclude=p.get("exclude"))
if line_matched:
log.debug("Found pattern {} in file {}".format(
repr(p), text_file))
log.debug(
"Found pattern %s in file %s",
repr(p), text_file)
ids.append(p["id"])
msgs.append(p["msg"].format(line_matched))
return list(set(ids)), list(set(msgs))
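The log.debug change above swaps eager str.format interpolation for %-style arguments, which the logging module interpolates only when the record is actually emitted (and which pylint's logging-format-interpolation check expects). A minimal sketch of the difference, with illustrative values:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("sova")

pattern = {"id": 1, "name": "example"}
# Eager: the message string is built even though DEBUG is disabled here.
log.debug("Found pattern {0} in file {1}".format(pattern, "job.log"))
# Lazy: interpolation happens only if the record passes the level check.
log.debug("Found pattern %s in file %s", pattern, "job.log")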

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -11,37 +11,50 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
import ast
import datetime
import socket
__metaclass__ = type
DOCUMENTATION = '''
---
module: ara_graphite
version_added: "1.0"
version_added: "1.0.0"
author: Red Hat (@RedHatOfficial)
short_description: Send ARA stats to graphite
description:
- Python ansible module to send ARA stats to graphite
description: >
Python Ansible module to send ARA stats to Graphite
options:
graphite_host:
description:
- The hostname of the Graphite server with optional port:
graphite.example.com:2004. The default port is 2003
description: >
The hostname of the Graphite server with optional port:
graphite.example.com:2004. The default port is 2003
required: True
type: str
graphite_prefix:
description: >
Prefix prepended to each metric path sent to Graphite
type: str
graphite_port:
description: >
Port of the Graphite server, 2003 by default
default: 2003
type: int
ara_mapping:
description:
- Mapping task names to Graphite paths
description: >
Mapping task names to Graphite paths
required: True
type: dict
ara_data:
description:
- List of ARA results: ara result list --all -f json
required: True
ara_only_successful:
description:
- Whether to send only successful tasks, ignoring skipped and failed,
by default True.
description: >
List of ARA results: ara result list --all -f json
required: True
type: str
only_successful_tasks:
description: >
Whether to send only successful tasks, ignoring skipped and failed ones,
True by default.
required: False
default: True
type: bool
'''
EXAMPLES = '''
@@ -56,6 +69,10 @@ EXAMPLES = '''
- "Name of task that deploys overcloud": overcloud.deploy.seconds
'''
import ast # noqa: E402
import datetime # noqa: E402
import socket # noqa: E402
def stamp(x):
'''Convert ISO timestamp to Unix timestamp
@@ -110,8 +127,8 @@ def send(data, gr_host, gr_port, prefix):
s.settimeout(3.0)
try:
s.connect((gr_host, gr_port))
except Exception as e:
return False, str(e)
except Exception as exc:
return False, str(exc)
for content in data:
s.send(prefix + " ".join([str(i) for i in content]) + "\n")
s.close()
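For context, send() above speaks Graphite's plaintext protocol: one "path value timestamp" line per metric over a TCP socket, port 2003 by default. A self-contained sketch of a single-metric sender (names are hypothetical; the module batches several lines per connection):

import socket
import time

def send_metric(host, path, value, port=2003, timestamp=None):
    # Plaintext protocol: "metric.path value unix_timestamp\n"
    line = "%s %s %d\n" % (path, value, int(timestamp or time.time()))
    sock = socket.create_connection((host, port), timeout=3.0)
    try:
        sock.sendall(line.encode("utf-8"))
    finally:
        sock.close()

# e.g. send_metric("graphite.example.com", "overcloud.deploy.seconds", 512)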
@@ -144,7 +161,9 @@ def send_stats(gr_host, gr_port, mapping, json_data, prefix, only_ok):
def main():
module = AnsibleModule( # noqa
from ansible.module_utils.basic import AnsibleModule
module = AnsibleModule(
argument_spec=dict(
graphite_host=dict(required=True, type='str'),
graphite_port=dict(required=False, type='int', default=2003),
@@ -164,7 +183,5 @@ def main():
module.exit_json(**result)
from ansible.module_utils.basic import * # noqa
if __name__ == "__main__":
main()
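The hunks above drop the module's trailing wildcard import in favour of an explicit AnsibleModule import, the layout the validate-modules sanity test expects: shebang, __future__ boilerplate, documentation strings, then imports. A stripped-down sketch of that layout (module name and option are hypothetical):

#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
---
module: example_echo
short_description: Minimal module skeleton
description: Echo a message back to the caller
options:
  msg:
    description: Message to echo
    required: True
    type: str
author: Red Hat (@RedHatOfficial)
'''

from ansible.module_utils.basic import AnsibleModule  # noqa: E402


def main():
    module = AnsibleModule(argument_spec=dict(msg=dict(type='str', required=True)))
    module.exit_json(changed=False, msg=module.params['msg'])


if __name__ == "__main__":
    main()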

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -11,68 +11,85 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
---
module: ara_influxdb
version_added: "1.0"
version_added: "1.0.0"
author: Red Hat (@RedHatOfficial)
short_description: Send ARA stats to InfluxDB
description:
- Python ansible module to send ARA stats to InfluxDB timeseries database
description: |
Python Ansible module to send ARA stats to the InfluxDB time-series database
options:
influxdb_url:
description:
- The URL of HTTP API of InfluxDB server:
for example https://influxdb.example.com
description: |
The URL of the InfluxDB server HTTP API,
for example https://influxdb.example.com
required: True
type: str
influxdb_port:
description:
- The port of HTTP API of InfluxDB server, by default is 8086
description: |
The port of the InfluxDB server HTTP API, 8086 by default
required: True
type: int
influxdb_user:
description:
- User for authentication to InfluxDB server
description: |
User name for authenticating to the InfluxDB server
required: False
type: str
influxdb_password:
description:
- Password for authentication to InfluxDB server
description: |
Password for authenticating to the InfluxDB server
required: False
type: str
influxdb_db:
description:
- Database name in InfluxDB server for sending data to it
description: |
Name of the InfluxDB database to write the data to
required: True
type: str
measurement:
description:
- Name of Influx measurement in database
description: |
Name of the InfluxDB measurement within the database
required: True
type: str
data_file:
description:
- Path to file to save InfluxDB data in it
description: |
Path to a file in which to save the InfluxDB data
required: True
type: str
ara_data:
description:
- List of ARA results: ara result list --all -f json
description: |
List of ARA results: ara result list --all -f json
required: True
type: str
only_successful_tasks:
description:
- Whether to send only successful tasks, ignoring skipped and failed,
by default True.
description: |
Whether to send only successful tasks, ignoring skipped and failed ones,
True by default.
required: True
type: bool
mapped_fields:
description:
- Whether to use configured static map of fields and tasks,
by default True.
description: |
Whether to use the configured static mapping of fields and tasks,
True by default.
required: False
default: True
type: bool
standard_fields:
description:
- Whether to send standard fields of each job, i.e. times,
by default True.
description: >
Whether to send the standard fields of each job (i.e. timings),
True by default.
required: False
default: True
type: bool
longest_tasks:
description:
- Whether to print only longest tasks and how many,
by default 0.
description: >
Whether to print only the longest tasks and how many of them,
0 by default.
required: False
type: int
'''
EXAMPLES = '''
@@ -102,9 +119,6 @@ import datetime  # noqa pylint: disable=C0413
import json # noqa pylint: disable=C0413
import os # noqa pylint: disable=C0413
import re # noqa pylint: disable=C0413
import requests # noqa pylint: disable=C0413
from requests.auth import HTTPBasicAuth # noqa pylint: disable=C0413
SCHEME = '{measure},{tags} {fields} {timestamp}'
@@ -441,6 +455,9 @@ def send(file_path, in_url, in_port, in_user, in_pass, in_db):
:param in_db: InfluxDB database name
:return: True if sent successfully, otherwise False
'''
import requests # noqa pylint: disable=C0413
from requests.auth import HTTPBasicAuth # noqa pylint: disable=C0413
url = in_url.rstrip("/")
if in_port != 80:
url += ":%d" % in_port
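Moving the requests import from module level into send(), as the hunk above does, keeps the module importable (for ansible-doc and the sanity import check) on hosts that lack the optional dependency. The pattern, sketched with a simplified hypothetical signature:

def send(file_path, in_url):
    '''Send a line-protocol payload to InfluxDB (simplified sketch).'''
    try:
        # Imported lazily so that merely loading the module never
        # requires the optional dependency.
        import requests
    except ImportError as exc:
        return False, "The 'requests' library is required: %s" % exc
    with open(file_path, 'rb') as payload:
        response = requests.post(in_url, data=payload)
    return response.ok, response.text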

plugins/modules/flatten_nested_dict.py Executable file → Normal file
View File

@@ -14,7 +14,6 @@
# limitations under the License.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.basic import AnsibleModule
ANSIBLE_METADATA = {
@@ -25,9 +24,8 @@ ANSIBLE_METADATA = {
DOCUMENTATION = """
module: flatten_nested_dict
author:
- "Sorin Sbarnea (@ssbarnea)"
version_added: '2.7'
author: Red Hat (@RedHatOfficial)
version_added: '2.7.0'
short_description: Flattens a nested dictionary into a list
notes: []
description:
@@ -38,11 +36,6 @@ options:
- Nested dictionary
required: True
type: dict
result:
description:
- List of commands to run.
type: list
elements: dict
"""
EXAMPLES = """
- name: Determine commands to run
@@ -63,13 +56,18 @@ data:
'group': 'system'
"""
from ansible.module_utils.basic import AnsibleModule # noqa: E402
def main():
result = {'data': [], 'changed': False}
module = AnsibleModule(
argument_spec=dict(
data=dict(type='dict', default={}),
))
argument_spec={
'data': {
'type': 'dict',
'required': True
}
})
try:
for group, commands in module.params['data'].items():
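The argument_spec change above makes data mandatory instead of silently defaulting to an empty dict. For reference, the flattening the module performs turns a {'group': {'name': {...}}} mapping into a list of per-command dicts; a standalone sketch (not the module's exact code):

def flatten(data):
    # {'system': {'cpu': {'cmd': 'lscpu'}}} ->
    # [{'cmd': 'lscpu', 'name': 'cpu', 'group': 'system'}]
    result = []
    for group, commands in data.items():
        for name, attrs in commands.items():
            entry = dict(attrs)
            entry.update({'name': name, 'group': group})
            result.append(entry)
    return result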

View File

@@ -16,11 +16,6 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.sova_lib import Pattern, parse
ANSIBLE_METADATA = {
'metadata_version': '0.1',
@@ -30,9 +25,8 @@ ANSIBLE_METADATA = {
DOCUMENTATION = """
module: sova
author:
- "Sagi Shnaidman (@sshnaidm)"
version_added: '2.7'
author: Sagi Shnaidman (@sshnaidm)
version_added: '2.7.0'
short_description: Parse CI jobs files for known failures
notes: []
description:
@@ -59,6 +53,9 @@ options:
created file Overcloud_failed_on_host.log in this directory.
It helps to know the reason without actually opening the file.
type: path
config:
description: Patterns configuration dictionary used to match known failures
type: dict
"""
EXAMPLES = """
- name: Run sova task
@@ -110,6 +107,10 @@ file_written:
sample: '/var/log/result_file'
"""
import os # noqa: E402
from copy import deepcopy # noqa: E402
from ansible.module_utils.basic import AnsibleModule # noqa: E402
def format_msg_filename(text):
for s in (" ", ":", ".", "/", ",", "'", ):
@@ -118,10 +119,11 @@ def format_msg_filename(text):
def main():
module = AnsibleModule(
argument_spec=dict(
config=dict(type='dict', default={}),
files=dict(type='dict', default={}),
files=dict(type='dict', required=True),
result=dict(type='path'),
result_file_dir=dict(type='path'),
))
@@ -136,6 +138,10 @@ def main():
results = {"processed_files": [], 'changed': False}
module.exit_json(**results)
dict_patterns = deepcopy(module.params['config'])
# from sova_lib import Pattern, parse
from ansible.module_utils.sova_lib import Pattern, parse
pattern = Pattern(dict_patterns)
PATTERNS = pattern.patterns
for name in module.params['files']:

View File

@@ -16,13 +16,13 @@ get_engine() {
}
container_cp() {
${engine} cp ${1}:${2} $3
${engine} cp "${1}:${2}" "$3"
};
engine=$(get_engine)
echo "${engine} was detected."
BASE_CONTAINER_EXTRA=/var/log/extra/${engine};
mkdir -p $BASE_CONTAINER_EXTRA;
mkdir -p "$BASE_CONTAINER_EXTRA";
ALL_FILE=$BASE_CONTAINER_EXTRA/${engine}_allinfo.log;
CONTAINER_INFO_CMDS=(
@@ -35,52 +35,54 @@ CONTAINER_INFO_CMDS=(
);
for cmd in "${CONTAINER_INFO_CMDS[@]}"; do
echo "+ $cmd" >> $ALL_FILE;
$cmd >> $ALL_FILE;
echo "" >> $ALL_FILE;
echo "" >> $ALL_FILE;
{
echo "+ $cmd"
$cmd
echo ""
echo ""
} >> "$ALL_FILE"
done;
# Get only failed containers, in a dedicated file
${engine} ps -a | grep -vE ' (IMAGE|Exited \(0\)|Up) ' &>> /var/log/extra/failed_containers.log;
# Get inspect infos for all containers even the ones not running.
for cont in $(${engine} ps -a | awk {'print $NF'} | grep -v NAMES); do
for cont in $(${engine} ps -a | awk '{print $NF}' | grep -v NAMES); do
INFO_DIR=$BASE_CONTAINER_EXTRA/containers/${cont};
mkdir -p $INFO_DIR;
mkdir -p "$INFO_DIR";
(
${engine} inspect $cont;
) &> $INFO_DIR/${engine}_info.log;
${engine} inspect "$cont";
) &> "$INFO_DIR/${engine}_info.log";
done;
# Get other infos for running containers
for cont in $(${engine} ps | awk {'print $NF'} | grep -v NAMES); do
for cont in $(${engine} ps | awk '{print $NF}' | grep -v NAMES); do
INFO_DIR=$BASE_CONTAINER_EXTRA/containers/${cont};
mkdir -p $INFO_DIR;
mkdir -p "$INFO_DIR";
(
if [ ${engine} = 'docker' ]; then
${engine} top $cont auxw;
if [ "${engine}" = 'docker' ]; then
${engine} top "$cont" auxw;
# NOTE(cjeanner): `podman top` does not support `ps` options.
elif [ ${engine} = 'podman' ]; then
${engine} top $cont;
elif [ "${engine}" = 'podman' ]; then
${engine} top "$cont";
fi
${engine} exec $cont vmstat -s
${engine} exec $cont ps axfo %mem,size,rss,vsz,pid,args
${engine} exec -u root $cont bash -c "\$(command -v dnf || command -v yum) list installed";
) &>> $INFO_DIR/${engine}_info.log;
${engine} exec "$cont" vmstat -s
${engine} exec "$cont" ps axfo %mem,size,rss,vsz,pid,args
${engine} exec -u root "$cont" bash -c "\$(command -v dnf || command -v yum) list installed";
) &>> "$INFO_DIR/${engine}_info.log";
container_cp $cont /var/lib/kolla/config_files/config.json $INFO_DIR/config.json;
container_cp "$cont" /var/lib/kolla/config_files/config.json "$INFO_DIR/config.json";
# NOTE(flaper87): This should go away. Services should be
# using a `logs` volume
# NOTE(mandre) Do not copy logs if the container is bind-mounting the /var/log directory
if ! ${engine} inspect $cont | jq .[0].Mounts[].Source | grep -x '"/var/log[/]*"' 2>1 > /dev/null; then
container_cp $cont /var/log $INFO_DIR/log;
if ! ${engine} inspect "$cont" | jq .[0].Mounts[].Source | grep -x '"/var/log[/]*"' >/dev/null 2>&1; then
container_cp "$cont" /var/log "$INFO_DIR/log";
fi;
# Delete symlinks because they break log collection and are generally
# not useful
find $INFO_DIR -type l -delete;
find "$INFO_DIR" -type l -delete;
done;
# NOTE(cjeanner) previous loop cannot have the "-a" flag because of the
@@ -88,10 +90,10 @@ done;
# in order to get all the logs we can. For instance, the previous loop
# would not allow to know why a container is "Exited (1)", preventing
# efficient debugging.
for cont in $(${engine} ps -a | awk {'print $NF'} | grep -v NAMES); do
for cont in $(${engine} ps -a | awk '{print $NF}' | grep -v NAMES); do
INFO_DIR=$BASE_CONTAINER_EXTRA/containers/${cont};
mkdir -p $INFO_DIR;
${engine} logs $cont &> $INFO_DIR/stdout.log;
mkdir -p "$INFO_DIR";
${engine} logs "$cont" &> "$INFO_DIR/stdout.log";
done;
# NOTE(flaper87) Copy contents from the logs volume. We can expect this
@@ -99,5 +101,5 @@ done;
# NOTE(cjeanner): Rather test the eXistenZ of the volume, as podman does not
# have such a thing
if [ -d /var/lib/docker/volumes/logs/_data ]; then
cp -r /var/lib/docker/volumes/logs/_data $BASE_CONTAINER_EXTRA/logs;
cp -r /var/lib/docker/volumes/logs/_data "$BASE_CONTAINER_EXTRA/logs";
fi

View File

@@ -16,12 +16,15 @@
# Usage: openstack stack event list -f json overcloud | \
# heat-deploy-times.py [list of resource names]
# If no resource names are provided, all of the resources will be output.
from __future__ import (absolute_import, division, print_function)
import json
import sys
import time
__metaclass__ = type
def process_events(all_events, events):
times = {}
for event in all_events:

View File

@@ -1,4 +1,3 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,7 +11,7 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import (absolute_import, division, print_function)
import gzip
import logging
import yaml
@@ -23,6 +22,7 @@ except ImportError:
import re as regex_module
__metaclass__ = type
logging.basicConfig(
format=('%(asctime)s - %(name)s - %(levelname)s - '
'%(module)s.%(funcName)s:%(lineno)d - %(message)s'))
@@ -51,7 +51,7 @@ class Pattern(object):
if regexp.get('multiline'):
flags.append(regex_module.MULTILINE)
self.regexes[regexp.get('name')] = regex_module.compile(
r'{}'.format(regexp.get('regex')), *flags)
r'{0}'.format(regexp.get('regex')), *flags)
def setup_patterns(self):
self._patterns = self.config.get('patterns', {})
@@ -95,8 +95,9 @@ def parse(text_file, patterns):
line_matched = line_match(
p["pattern"], text, exclude=p.get("exclude"))
if line_matched:
log.debug("Found pattern {} in file {}".format(
repr(p), text_file))
log.debug(
"Found pattern %s in file %s",
repr(p), text_file)
ids.append(p["id"])
msgs.append(p["msg"].format(line_matched))
return list(set(ids)), list(set(msgs))

View File

@@ -12,8 +12,12 @@
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import (absolute_import, division, print_function)
import setuptools
__metaclass__ = type
setuptools.setup(
setup_requires=['pbr'],
pbr=True)

View File

@@ -0,0 +1,5 @@
plugins/modules/ara_graphite.py validate-modules:missing-gplv3-license
plugins/modules/ara_influxdb.py validate-modules:missing-gplv3-license
plugins/modules/flatten_nested_dict.py validate-modules:missing-gplv3-license
plugins/modules/sova.py validate-modules:missing-gplv3-license
docs/static/env-setup-virt.rst rstcheck

tests/sanity/ignore-2.9.txt Symbolic link
View File

@@ -0,0 +1 @@
ignore-2.10.txt

View File

@@ -0,0 +1,2 @@
pyyaml
requests

View File

@@ -1,3 +1,4 @@
from __future__ import (absolute_import, division, print_function)
import pytest # noqa
import os
import sys
@@ -7,6 +8,7 @@ from common.utils import (
import yaml
__metaclass__ = type
SAMPLE_INPUT_1 = """
data:
system:

View File

@@ -64,3 +64,8 @@ deps =
pytest-plus # provides support for PYTEST_REQPASS
commands =
python -m pytest --color=yes --html={envlogdir}/reports.html --self-contained-html {tty:-s} {posargs}
[testenv:ansible]
description = Used as base for all tox-ansible environments
basepython = python3.6

View File

@@ -27,8 +27,7 @@
jobs: &jobs
- openstack-tox-linters
- openstack-tox-molecule
- tox-ansible-test-sanity:
voting: false # until we fix reported errors
- tox-ansible-test-sanity
# Limit the number of jobs executed while still ensuring a relevant
# level of coverage. If specific tasks are to be tested we should
# consider implementing functional tests for them, especially as