Add tripleo-validations role for component testing

This role uses the component vars dictionary.
The list of components and associated validations
is not exhaustive yet, but will be added in a follow-up
review, once the role has been exercised in the pipeline.

Depends-On: https://review.rdoproject.org/r/#/c/31281/
Change-Id: I6bd5211518a56f7b5c8ad6843ef2a784eb062e98
(cherry picked from commit 02b1fcfdbe)
This commit is contained in:
Mathieu Bultel 2020-09-30 23:26:38 +02:00 committed by mbu
parent 72f6dc2f37
commit 2a3c64c843
6 changed files with 263 additions and 0 deletions

View File

@ -0,0 +1,25 @@
---
val_working_dir: "{{ working_dir }}"
validation_log_dir: "/var/log/validations"
validation_environment: "{{ job.environment_type|default('undercloud') }}"
# Defaulting component to an empty string allows the role to run outside
# of CI: tasks/main.yml skips the validation run when component is empty.
# (default(None) would render as the string "None" and never match the
# `component != ''` guard.)
component: "{{ job.component|default('') }}"
# Both commands can be used:
# "openstack tripleo validator" or "/usr/bin/validation.py"
# The default is validation.py because this is the 1st entry available on
# a system for validation.
# The openstack command needs to install python-tripleoclient and its
# dependencies first. This value should be overridden as needed in the
# roles/vars calls.
validation_command: "/usr/bin/validation.py"
# Toggle for the CLI test script (templates/run-validations-cli-tests.sh.j2):
run_tripleo_validations_tests: false
# Toggle for running the per-component validations (tasks/run.yml):
run_validation: true
# Run validations one by one from validations_list (vars) and/or by group
# from validations_group (vars):
run_from_list: true
run_from_group: false
# Jinja script name for the tripleo validation CLI tests:
run_validations_cli_tests_script: run-validations-cli-tests.sh.j2

View File

@ -0,0 +1,16 @@
---
# Install the validations framework, then dispatch to the validation run
# and/or the CLI test script depending on the role's toggles.
- name: Ensure tripleo-validations is present
  # package is distro-agnostic; behaves as yum on RHEL/CentOS systems.
  package:
    name: openstack-tripleo-validations
    state: present
  become: true

# Only run the component validations when a component is actually set;
# default('') keeps the condition safe if component is undefined.
- name: Run validations
  include_tasks: run.yml
  when:
    - run_validation|default(false)|bool
    - component | default('') != ''

- name: Run validations tests scripts
  include_tasks: test_script.yml
  when: run_tripleo_validations_tests|bool

View File

@ -0,0 +1,42 @@
---
- name: Make sure Validations Log dir exists
  become: true
  file:
    path: "{{ validation_log_dir }}"
    state: directory
    mode: "0755"
    owner: "{{ ansible_user }}"
    group: "{{ ansible_user }}"
    recurse: true

# Each run writes its output to validation_<name>.log in the remote user's
# current directory; the result check below parses those files.
- name: Run validations from list
  register: run_validation_list
  shell: |
    export OS_CLOUD={{ validation_environment }}
    {{ validation_command }} run --validation {{ item }} > validation_{{ item }}.log 2>&1
  when:
    - run_from_list|bool
  loop: "{{ validations_list[component] }}"

- name: Run validations by group
  register: run_validation_group
  shell: |
    export OS_CLOUD={{ validation_environment }}
    {{ validation_command }} run --group {{ item }} > validation_group_{{ item }}.log 2>&1
  when:
    - run_from_group|bool
  loop: "{{ validations_group[component] }}"

# The status is presumably in the 6th column of the matching log line —
# TODO confirm against the validation CLI output format; the sed strips
# ANSI color escape codes so the PASSED comparison below works.
- name: Check Validation results
  register: run_output
  shell: |
    cat validation_{{ item }}.log | grep {{ item }} | awk '{ print $6; }' | sed 's/\x1B\[[0-9;]\{1,\}[A-Za-z]//g'
  when:
    - run_from_list|bool
  loop: "{{ validations_list[component] }}"

# Guard on stdout being defined: when run_from_list is false the check task
# is skipped and its results carry no stdout; default([]) keeps the loop
# valid when run_output registered nothing. item.item is the validation
# name (item alone would print the whole registered result dict).
- name: Fail when a validation did not pass
  fail:
    msg: "Validation failed: {{ item.item }} has failed."
  when:
    - item.stdout is defined
    - item.stdout != "PASSED"
  loop: "{{ run_output.results | default([]) }}"

View File

@ -0,0 +1,13 @@
---
# Render the CLI test script from the Jinja template, then execute it,
# capturing all output to a log file in the working directory.
- name: Create run-validations-cli-tests.sh
  template:
    src: "{{ run_validations_cli_tests_script }}"
    dest: "{{ val_working_dir }}/run-validations-cli-tests.sh"
    # Quoted: a bare 0755 is parsed by YAML as the octal integer 493.
    mode: "0755"
  when: run_tripleo_validations_tests|bool

- name: Run TripleO validations script tests
  register: run_tripleo_validation_tests
  shell: |
    {{ val_working_dir }}/run-validations-cli-tests.sh > run_validations_cli_tests.log 2>&1
  when: run_tripleo_validations_tests|bool

View File

@ -0,0 +1,149 @@
#!/bin/bash
# -u: fail on undefined vars; -x: trace. -e is deliberately omitted so a
# failing validation increments a counter instead of aborting the script.
set -ux
### --start_docs
## Run CLI Validations tests
## =========================

## Prepare Your Environment
## ------------------------

## * Source in the undercloud credentials.
## ::
WORKING_DIR={{ val_working_dir }}
source ${WORKING_DIR}/stackrc

# Must match the variable declared in defaults/main.yml
# ("validation_command"); "tripleo_validation_command" is not defined
# anywhere in this role and would break template rendering.
VALIDATIONS_CLI="{{ validation_command }}"
LIST_FAILURE_COUNTER=0
RUN_FAILURE_COUNTER=0
## CLI Validations Listing tests
## -----------------------------
# Exercises the read-only CLI paths (list/show). Each failed command bumps
# LIST_FAILURE_COUNTER, which the end of the script checks to set the exit
# status. The ## comments are extracted into docs (--start_docs markers).
function run_validations_cli_listing {
    ## * Get the list of all the existing validations through the CLI
    ##   with no arguments (table output by default)
    ## ::
    # $? right after `time cmd` is cmd's exit status, not time's.
    time ${VALIDATIONS_CLI} list
    if [ $? -eq 0 ]; then
        echo "Validations Listing (no args) passed successfully"
    else
        echo "Validations Listing (no args) Failed"
        LIST_FAILURE_COUNTER=$(($LIST_FAILURE_COUNTER+1))
    fi

    ## * Get the list of all the existing validations through the CLI
    ##   with JSON output
    ## ::
    time ${VALIDATIONS_CLI} list -f json
    if [ $? -eq 0 ]; then
        echo "Validations Listing (JSON format) passed successfully"
    else
        echo "Validations Listing (JSON Format) Failed"
        LIST_FAILURE_COUNTER=$(($LIST_FAILURE_COUNTER+1))
    fi

    ## * Get the list of all the existing validations through the CLI
    ##   with YAML output
    ## ::
    time ${VALIDATIONS_CLI} list -f yaml
    if [ $? -eq 0 ]; then
        echo "Validations Listing (YAML format) passed successfully"
    else
        echo "Validations Listing (YAML Format) Failed"
        LIST_FAILURE_COUNTER=$(($LIST_FAILURE_COUNTER+1))
    fi

    ## * Get all the information about the undercloud-ram
    ##   validation
    ## ::
    time ${VALIDATIONS_CLI} show check-ram
    if [ $? -eq 0 ]; then
        echo "Listing validation information passed successfully"
    else
        echo "Listing validation information Failed"
        LIST_FAILURE_COUNTER=$(($LIST_FAILURE_COUNTER+1))
    fi

    ## * Get the list of all parameters available for the check-ram
    ##   validation
    ## ::
    time ${VALIDATIONS_CLI} show parameter --validation check-ram
    if [ $? -eq 0 ]; then
        echo "Listing validation parameters passed successfully"
    else
        echo "Listing validation parameters failed"
        LIST_FAILURE_COUNTER=$(($LIST_FAILURE_COUNTER+1))
    fi

    ## * Get the list of all parameters available for the undercloud-ram
    ##   validation and create a JSON file with those variables
    ## ::
    # Remove any stale file first so the -f existence check below really
    # proves the CLI created it on this run.
    if [ -f ${WORKING_DIR}/uc-ram-vars.json ]; then
        rm -Rf ${WORKING_DIR}/uc-ram-vars.json
    fi
    time ${VALIDATIONS_CLI} show parameter \
        --download json ${WORKING_DIR}/uc-ram-vars.json \
        --validation check-ram
    if [ $? -eq 0 ] && [ -f ${WORKING_DIR}/uc-ram-vars.json ]; then
        echo "Creating file with validation parameters passed successfully"
    else
        echo "Creating file with validation parameters failed"
        LIST_FAILURE_COUNTER=$(($LIST_FAILURE_COUNTER+1))
    fi
}
# Exercises the execution CLI paths on the harmless no-op validation.
# Each failed command bumps RUN_FAILURE_COUNTER.
function run_validations_cli_run {
    ## * Run no-op validations through the CLI
    ## ::
    # `time cmd` propagates cmd's exit status, so it can sit directly in
    # the if condition.
    if time ${VALIDATIONS_CLI} run --validation no-op; then
        echo "No-op validations run passed successfully"
    else
        echo "No-op validations run Failed"
        RUN_FAILURE_COUNTER=$(($RUN_FAILURE_COUNTER+1))
    fi

    ## * Run the no-op validations group through the CLI
    ## ::
    if ${VALIDATIONS_CLI} run --group no-op; then
        echo "No-op validations group run passed successfully"
    else
        echo "No-op validations group run Failed"
        RUN_FAILURE_COUNTER=$(($RUN_FAILURE_COUNTER+1))
    fi
}
run_validations_cli_listing
run_validations_cli_run

# Aggregate the two failure counters: any recorded failure fails the run.
TOTAL_FAILURES=$((LIST_FAILURE_COUNTER + RUN_FAILURE_COUNTER))
if [ ${TOTAL_FAILURES} -gt 0 ]; then
    echo "At least one test has failed!"
    exit 1
fi
exit 0
## --stop_docs

View File

@ -0,0 +1,18 @@
---
# Per-component validation mapping consumed by tasks/run.yml.
# Keys are CI component names (matched against the `component` variable);
# values are the validation names passed one by one to
# `{{ validation_command }} run --validation <name>` when run_from_list
# is enabled. Not exhaustive yet — extended in follow-up reviews.
validations_list:
  compute:
    - nova-status
    - nova-svirt
  network:
    - neutron-sanity-check
  validation:
    - check-ram
    - check-cpu
    - 512e
  tripleo:
    - undercloud-disk-space
# Validation group names passed to `run --group <name>` when
# run_from_group is enabled.
validations_group:
  compute:
    - compute
  network:
    - network