Retire TripleO: remove repo content

The TripleO project is retiring (https://review.opendev.org/c/openstack/governance/+/905145); this commit removes the content of this project's repo.

Change-Id: I4c7d62a3a3ef5601d04375677e1ea07f9cd9eff9
parent 44fdc64724
commit 9080d003d9
@@ -1,37 +0,0 @@
exclude_paths:
  - releasenotes/
  - ci/playbooks/
parseable: true
quiet: false
rulesdir:
  - .ansible-lint_rules/

# Mock modules or roles in order to pass ansible-playbook --syntax-check
mock_modules:
  - hiera  # Modules can only be installed by rpm
  - validations_read_ini  # Modules can only be installed by rpm
  - warn  # Modules can only be installed by rpm
  - tripleo_overcloud_role_list  # Modules can only be installed by rpm
  - tripleo_overcloud_role_show  # Modules can only be installed by rpm
mock_roles:
  - check_latest_packages_version

skip_list:
  # Lines should be no longer than 120 chars.
  - '204'
  # Using command rather than module where
  # we need to use curl or rsync.
  - '303'
  # Shell tasks use a pipeline without pipefail;
  # this requires refactoring, skip for now.
  - '306'
  # Tasks that run when changed should likely be handlers;
  # this requires refactoring, skip for now.
  - '503'
  # meta/main.yml should contain relevant info
  - '701'
  # Tags must contain lowercase letters and digits only
  - '702'
  # meta/main.yml default values should be changed
  - '703'
verbosity: 1
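The mock_modules list above exists so that ansible-lint and `ansible-playbook --syntax-check` can parse playbooks that call modules shipped only as RPMs. A minimal, hypothetical sketch of a validation task using one of those mocked modules (the parameter names are assumptions for illustration, not taken from this diff):

    # Hypothetical task; validations_read_ini ships in an RPM (validations-common),
    # so the linter only ever sees the mocked module name.
    - name: Read the local interface from undercloud.conf
      validations_read_ini:
        path: /home/stack/undercloud.conf
        section: DEFAULT
        key: local_interface
      register: local_interface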
@@ -1,197 +0,0 @@
import os
import yaml

from ansiblelint.errors import MatchError
from ansiblelint.rules import AnsibleLintRule


class ValidationHasMetadataRule(AnsibleLintRule):
    id = '750'
    shortdesc = 'Validation playbook must have mandatory metadata'

    info = """
---
- hosts: localhost
  vars:
    metadata:
      name: Validation Name
      description: >
        A full description of the validation.
      groups:
        - group1
        - group2
        - group3
      categories:
        - category1
        - category2
        - category3
      products:
        - product1
        - product2
        - product3
"""

    description = (
        "The Validation playbook must have mandatory metadata:\n"
        "```{}```".format(info)
    )

    severity = 'HIGH'
    tags = ['metadata']

    no_vars_found = "The validation playbook must contain a 'vars' dictionary"
    no_meta_found = (
        "The validation playbook must contain "
        "a 'metadata' dictionary under vars"
    )
    no_classification_found = \
        "*metadata* should contain a list of {classification}"

    unknown_classifications_found = (
        "Unknown {classification_key}(s) '{unknown_classification}' found! "
        "The official list of {classification_key} are '{known_classification}'. "
    )

    how_to_add_classification = {
        'groups': (
            "To add a new validation group, please add it in the groups.yaml "
            "file at the root of the tripleo-validations project."
        )
    }

    def get_classifications(self, classification='groups'):
        """Returns a list of classification names
        defined for tripleo-validations in the '{classification}.yaml' file
        located in the base repo directory.
        """
        file_path = os.path.abspath(classification + '.yaml')

        try:
            with open(file_path, "r") as definitions:
                contents = yaml.safe_load(definitions)
        except (PermissionError, OSError):
            raise RuntimeError(
                "{}.yaml file at '{}' inaccessible.".format(
                    classification,
                    file_path))

        results = [name for name, _ in contents.items()]

        return results

    def check_classification(self, metadata, path,
                             classification_key, strict=False):
        """Check validity of validation classifications,
        such as groups, categories and products.
        This one is tricky.
        Empty lists in python evaluate as false,
        so we can't just check for the truth value of the returned list.
        Instead we have to compare the returned value with `None`.
        """
        classification = metadata.get(classification_key, None)

        if classification is None:
            return MatchError(
                message=self.no_classification_found.format(
                    classification=classification_key
                ),
                filename=path,
                details=str(metadata))
        else:
            if not isinstance(classification, list):
                return MatchError(
                    message="*{}* must be a list".format(classification_key),
                    filename=path,
                    details=str(metadata))
            elif strict:
                classifications = self.get_classifications(classification_key)
                unknown_classifications = list(
                    set(classification) - set(classifications))
                if unknown_classifications:
                    message = self.unknown_classifications_found.format(
                        unknown_classification=unknown_classifications,
                        known_classification=classifications,
                        classification_key=classification_key)
                    message += self.how_to_add_classification.get(classification_key, "")
                    return MatchError(
                        message=message,
                        filename=path,
                        details=str(metadata))

    def matchplay(self, file, data):
        results = []
        path = file['path']

        if file['type'] == 'playbook':
            if path.startswith("playbooks/") \
                    or "tripleo-validations/playbooks/" in path:

                # *hosts* line check
                hosts = data.get('hosts', None)
                if not hosts:
                    results.append(
                        MatchError(
                            message="No *hosts* key found in the playbook",
                            filename=path,
                            details=str(data)))

                # *vars* lines check
                vars = data.get('vars', None)
                if not vars:
                    results.append(
                        MatchError(
                            message=self.no_vars_found,
                            filename=path,
                            details=str(data)))
                else:
                    if not isinstance(vars, dict):
                        results.append(
                            MatchError(
                                message='*vars* must be a dictionary',
                                filename=path,
                                details=str(data)))

                    # *metadata* lines check
                    metadata = data['vars'].get('metadata', None)
                    if metadata:
                        if not isinstance(metadata, dict):
                            results.append(
                                MatchError(
                                    message='*metadata* must be a dictionary',
                                    filename=path,
                                    details=str(data)))
                    else:
                        results.append(
                            MatchError(
                                message=self.no_meta_found,
                                filename=path,
                                details=str(data)))

                    # *metadata>[name|description] lines check
                    for info in ['name', 'description']:
                        if not metadata.get(info, None):
                            results.append(
                                MatchError(
                                    message='*metadata* must contain a %s key' % info,
                                    filename=path,
                                    details=str(data)))
                            continue
                        if not isinstance(metadata.get(info), str):
                            results.append(
                                MatchError(
                                    message='*%s* should be a string' % info,
                                    filename=path,
                                    details=str(data)))

                    # Checks for metadata we use to classify validations:
                    # groups, categories and products.
                    for classification in ['categories', 'products', 'groups']:
                        classification_error = self.check_classification(
                            metadata,
                            path,
                            classification,
                            strict=(classification == 'groups'))

                        if classification_error:
                            results.append(classification_error)

        return results
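The strict 'groups' check above resolves the list of known group names via get_classifications, which reads a groups.yaml file at the repository root. That file is not part of this hunk; as an illustrative assumption only, its entries follow the shape the code expects (a mapping of group name to a list holding a description), roughly:

    ---
    pre-deployment:
      - description: Validations to run before deploying the overcloud.
    post-deployment:
      - description: Validations to run once the overcloud has been deployed.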
@@ -1,37 +0,0 @@
# Molecule managed
# Copyright 2021 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


{% if item.registry is defined %}
FROM {{ item.registry.url }}/{{ item.image }}
{% else %}
FROM {{ item.image }}
{% endif %}

RUN if [ $(command -v apt-get) ]; then apt-get update && apt-get install -y python sudo bash ca-certificates && apt-get clean; \
    elif [ $(command -v dnf) ]; then dnf makecache && dnf --assumeyes install sudo python*-devel python*-dnf bash epel-release {{ item.pkg_extras | default('') }} && dnf clean all; \
    elif [ $(command -v yum) ]; then yum makecache fast && yum install -y python sudo yum-plugin-ovl python-setuptools bash {{ item.pkg_extras | default('') }} && sed -i 's/plugins=0/plugins=1/g' /etc/yum.conf && yum clean all; \
    elif [ $(command -v zypper) ]; then zypper refresh && zypper install -y python sudo bash python-xml {{ item.pkg_extras | default('') }} && zypper clean -a; \
    elif [ $(command -v apk) ]; then apk update && apk add --no-cache python sudo bash ca-certificates {{ item.pkg_extras | default('') }}; \
    elif [ $(command -v xbps-install) ]; then xbps-install -Syu && xbps-install -y python sudo bash ca-certificates {{ item.pkg_extras | default('') }} && xbps-remove -O; fi

{% for pkg in item.easy_install | default([]) %}
# install pip for centos where there is no python-pip rpm in default repos
RUN easy_install {{ pkg }}
{% endfor %}


CMD ["sh", "-c", "while true; do sleep 10000; done"]
@@ -1,70 +0,0 @@
---
# Tripleo-validations uses a shared molecule configuration file to avoid
# repetition. That configuration file is located at the repository level
# ({REPO}/.config/molecule/config.yml) and defines all the default values for
# all the molecule.yml files across all the roles. By default, the role-addition
# process will produce an empty molecule.yml inheriting this config.yml file.
#
# Any key defined in the role molecule.yml file will override values from this
# config.yml file.
#
# IMPORTANT: if you want to override the default values set here in this file,
# you will have to redefine them completely in your molecule.yml (at the role
# level) and add your extra configuration!
#
# For instance, if you need to add an extra package in your CentOS 8 Stream
# container, you will have to add the entire "platforms" key into your
# molecule.yml file and add your package name in the pkg_extras key.
#
# No merge will happen between your molecule.yml and this config.yml
# files. That's why you will have to redefine them completely.

driver:
  name: podman

log: true

platforms:
  - name: centos
    hostname: centos
    image: centos/centos:stream8
    registry:
      url: quay.io
    dockerfile: ../../../../.config/molecule/Dockerfile
    pkg_extras: python*-setuptools python*-pyyaml
    volumes:
      - /etc/ci/mirror_info.sh:/etc/ci/mirror_info.sh:ro
    privileged: true
    environment: &env
      http_proxy: "{{ lookup('env', 'http_proxy') }}"
      https_proxy: "{{ lookup('env', 'https_proxy') }}"
    ulimits: &ulimit
      - host

provisioner:
  name: ansible
  inventory:
    hosts:
      all:
        hosts:
          centos:
            ansible_python_interpreter: /usr/bin/python3
  log: true
  options:
    vvv: true
  env:
    ANSIBLE_STDOUT_CALLBACK: yaml
    ANSIBLE_ROLES_PATH: "${ANSIBLE_ROLES_PATH}:${HOME}/zuul-jobs/roles"
    ANSIBLE_LIBRARY: "${ANSIBLE_LIBRARY:-/usr/share/ansible/plugins/modules}"

scenario:
  test_sequence:
    - destroy
    - create
    - prepare
    - converge
    - verify
    - destroy

verifier:
  name: ansible
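As the comments in this shared config.yml state, a role-level molecule.yml overrides whole top-level keys and nothing is merged. A hedged sketch of such an override, redefining the entire platforms key just to append one extra package to pkg_extras (the added package name is purely an example):

    ---
    platforms:
      - name: centos
        hostname: centos
        image: centos/centos:stream8
        registry:
          url: quay.io
        dockerfile: ../../../../.config/molecule/Dockerfile
        pkg_extras: python*-setuptools python*-pyyaml iproute
        volumes:
          - /etc/ci/mirror_info.sh:/etc/ci/mirror_info.sh:ro
        privileged: true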
.coveragerc
@@ -1,13 +0,0 @@
[run]
branch = True
source =
    tripleo_validations
    library
    lookup_plugins
omit = tripleo-validations/openstack/*

[report]
ignore_errors = True
omit =
    tripleo_validations/tests/*
    tests/*
@@ -1,55 +0,0 @@
# Docker/Podman image doesn't need any files that git doesn't track.
# Therefore the .dockerignore largely follows the structure of .gitignore.
# C extensions
*.so
# Packages
*.egg*
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64
# Installer logs
pip-log.txt
# Unit test / coverage reports
cover/
.coverage*
!.coveragerc
.tox
nosetests.xml
.testrepository
.venv
.stestr/*
# Translations
*.mo
# Mr Developer
.mr.developer.cfg
.project
.pydevproject
# Complexity
output/*.html
output/*/index.html
# Sphinx
doc/build
doc/source/reference/api/
# pbr generates these
AUTHORS
ChangeLog
# Editors
*~
.*.swp
.*sw?
# Files created by releasenotes build
releasenotes/build
# Ansible specific
hosts
*.retry
# Vagrantfiles, since we are using docker
Vagrantfile.*
.gitignore
@@ -1,67 +0,0 @@
*.py[cod]

# C extensions
*.so

# Packages
*.egg*
*.egg-info
dist
build
eggs
parts
bin
var
sdist
develop-eggs
.installed.cfg
lib
lib64

# Installer logs
pip-log.txt

# Unit test / coverage reports
cover/
.coverage*
!.coveragerc
.tox
nosetests.xml
.testrepository
.venv
.stestr/*

# Translations
*.mo

# Mr Developer
.mr.developer.cfg
.project
.pydevproject

# Complexity
output/*.html
output/*/index.html

# Sphinx
doc/build

# pbr generates these
AUTHORS
ChangeLog

# Editors
*~
.*.swp
.*sw?
doc/source/validations-*.rst

# Files created by releasenotes build
releasenotes/build

# Ansible specific
hosts
*.retry

# Roles testing
roles/roles.galaxy

.mailmap
@@ -1,3 +0,0 @@
# Format is:
# <preferred e-mail> <other e-mail 1>
# <preferred e-mail> <other e-mail 2>
@@ -1,53 +0,0 @@
---
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.4.0
    hooks:
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: mixed-line-ending
      - id: check-byte-order-marker
      - id: check-executables-have-shebangs
      - id: check-merge-conflict
      - id: check-symlinks
      - id: debug-statements
        files: .*\.(yaml|yml)$

  - repo: https://github.com/pycqa/flake8
    rev: 3.9.1
    hooks:
      - id: flake8
        additional_dependencies: [flake8-typing-imports==1.12.0]
        entry: flake8 --ignore=E24,E121,E122,E123,E124,E126,E226,E265,E305,E402,F401,F405,E501,E704,F403,F841,W503,W605

  - repo: https://github.com/adrienverge/yamllint.git
    rev: v1.33.0
    hooks:
      - id: yamllint
        files: \.(yaml|yml)$
        types: [file, yaml]
        entry: yamllint --strict -f parsable

  - repo: https://github.com/ansible-community/ansible-lint
    rev: v5.3.2
    hooks:
      - id: ansible-lint
        always_run: true
        pass_filenames: false
        additional_dependencies:
          - 'ansible-core<2.12'
        verbose: true
        entry: ansible-lint --force-color -p -v

  - repo: https://github.com/openstack-dev/bashate.git
    rev: 2.0.0
    hooks:
      - id: bashate
        entry: bashate --error . --verbose --ignore=E006,E040
        # Run bashate check for all bash scripts
        # Ignores the following rules:
        # E006: Line longer than 79 columns (as many scripts use jinja
        #       templating, this is very difficult)
        # E040: Syntax error determined using `bash -n` (as many scripts
        #       use jinja templating, this will often fail and the syntax
        #       error will be discovered in execution anyway)
@@ -1,3 +0,0 @@
[DEFAULT]
test_path=${TEST_PATH:-./tripleo_validations/tests}
top_dir=./

@@ -1,7 +0,0 @@
[DEFAULT]
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
             OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
             OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
             ${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
.yamllint
@@ -1,14 +0,0 @@
---
extends: default

rules:
  line-length:
    # matches hardcoded 160 value from ansible-lint
    max: 160
  indentation:
    spaces: consistent
    indent-sequences: true
    check-multi-line-strings: false
ignore: |
  zuul.d/molecule.yaml
  releasenotes/notes/*.yaml
@@ -1,56 +0,0 @@
For general information on contributing to OpenStack, please check out the
`contributor guide <https://docs.openstack.org/contributors/>`_ to get started.
It covers all the basics that are common to all OpenStack projects: the accounts
you need, the basics of interacting with our Gerrit review system, how we
communicate as a community, etc.

The information below covers the project-specific information you need to get started with TripleO.

Documentation
=============

Documentation for the TripleO project can be found `here <https://docs.openstack.org/tripleo-docs/latest/index.html>`_

Communication
=============

* IRC channel ``#validation-framework`` at `Libera`_ (for all subject matters)
* IRC channel ``#tripleo`` at `OFTC`_ (OpenStack and TripleO discussions)
* Mailing list (prefix subjects with ``[tripleo][validations]`` for faster responses)
  http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack-discuss

.. _Libera: https://libera.chat/
.. _OFTC: https://www.oftc.net/

Contacting the Core Team
========================

Please refer to the `TripleO Core Team
<https://review.opendev.org/#/admin/groups/190,members>`_ contacts.

Bug Tracking
============

We track our tasks in `Launchpad <https://bugs.launchpad.net/tripleo/+bugs?field.tag=validations>`_ and in
`StoryBoard <https://storyboard.openstack.org/#!/project_group/76>`_

Reporting a Bug
===============

You found an issue and want to make sure we are aware of it? You can do so on
`Launchpad <https://bugs.launchpad.net/tripleo/+filebug>`__. Please, add the
validations tag to your bug.

More info about Launchpad usage can be found on the `OpenStack docs page
<https://docs.openstack.org/contributors/common/task-tracking.html#launchpad>`_

Getting Your Patch Merged
=========================

All changes proposed to TripleO require two ``Code-Review +2`` votes from
TripleO core reviewers before one of the core reviewers can approve the patch by
giving a ``Workflow +1`` vote.

Project Team Lead Duties
========================

All common PTL duties are enumerated in the `PTL guide
<https://docs.openstack.org/project-team-guide/ptl.html>`_.

The Release Process for TripleO is documented in `Release Management
<https://docs.openstack.org/tripleo-docs/latest/developer/release.html>`_.

Documentation for the TripleO project can be found `here <https://docs.openstack.org/tripleo-docs/latest/index.html>`_
@@ -1,4 +0,0 @@
tripleo-validations Style Commandments
======================================

Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/
LICENSE
@@ -1,175 +0,0 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.
@@ -1,6 +0,0 @@
include AUTHORS
include ChangeLog
exclude .gitignore
exclude .gitreview

global-exclude *.pyc
README.rst
@@ -1,22 +1,10 @@
.. image:: https://governance.openstack.org/tc/badges/tripleo-validations.svg
   :target: https://governance.openstack.org/tc/reference/tags/index.html
This project is no longer maintained.

.. Change things from this point on
The contents of this repository are still available in the Git
source code management system. To see the contents of this
repository before it reached its end of life, please check out the
previous commit with "git checkout HEAD^1".

A collection of Ansible roles and playbooks to detect and report potential
issues during TripleO deployments.

The validations will help detect issues early in the deployment process and
prevent field engineers from wasting time on misconfiguration or hardware
issues in their environments.

All validations are written in Ansible and are written in a way that's
consumable by the `Validation Framework Command Line Interface (CLI)
<https://docs.openstack.org/validations-libs/latest/reference/api/validations_libs.cli.html>`_
or by Ansible directly.

* Free software: Apache license
* Documentation: https://docs.openstack.org/tripleo-validations/latest/
* Release notes: https://docs.openstack.org/releasenotes/tripleo-validations/
* Source: https://opendev.org/openstack/tripleo-validations
* Bugs: https://storyboard.openstack.org/#!/project/openstack/tripleo-validations
For any further questions, please email
openstack-discuss@lists.openstack.org or join #openstack-dev on
OFTC.
@@ -1,7 +0,0 @@
---
collections:
  - containers.podman
  - community.general
  - community.crypto
  - ansible.posix
  - openstack.cloud
@@ -1,27 +0,0 @@
export TRIPLEO_VALIDATIONS_WORKPATH="$(dirname $(readlink -f ${BASH_SOURCE[0]}))"
export ANSIBLE_STDOUT_CALLBACK=debug
export ANSIBLE_CALLBACK_PLUGINS="${TRIPLEO_VALIDATIONS_WORKPATH}/callback_plugins"

ANSIBLE_LIBRARY="${TRIPLEO_VALIDATIONS_WORKPATH}/library"
export ANSIBLE_LIBRARY="${ANSIBLE_LIBRARY}:${TRIPLEO_VALIDATIONS_WORKPATH}/roles/roles.galaxy/validations-common/validations_common/library"
export ANSIBLE_LOOKUP_PLUGINS="${TRIPLEO_VALIDATIONS_WORKPATH}/lookup_plugins"

export ANSIBLE_ROLES_PATH="${TRIPLEO_VALIDATIONS_WORKPATH}/roles"
export ANSIBLE_ROLES_PATH="${ANSIBLE_ROLES_PATH}:${TRIPLEO_VALIDATIONS_WORKPATH}/roles/roles.galaxy/tripleo-ansible/tripleo_ansible/roles"

export ANSIBLE_INVENTORY="${TRIPLEO_VALIDATIONS_WORKPATH}/tests/hosts.ini"
export ANSIBLE_RETRY_FILES_ENABLED="0"
export ANSIBLE_LOAD_CALLBACK_PLUGINS="1"
export ANSIBLE_HOST_KEY_CHECKING=False

function unset-ansible-test-env {
    for i in $(env | grep ANSIBLE_ | awk -F'=' '{print $1}'); do
        unset ${i}
    done
    unset TRIPLEO_VALIDATIONS_WORKPATH
    echo -e "Ansible test environment deactivated.\n"
    unset -f unset-ansible-test-env
}

echo -e "Ansible test environment is now active"
echo -e "Run 'unset-ansible-test-env' to deactivate.\n"
@@ -1,4 +0,0 @@
[defaults]
retry_files_enabled = False
host_key_checking=False
stdout_callback = default
bindep.txt
@@ -1,41 +0,0 @@
# This file facilitates OpenStack-CI package installation
# before the execution of any tests.
#
# See the following for details:
#  - https://docs.openstack.org/infra/bindep/
#  - https://opendev.org/opendev/bindep/
#
# Even if the role does not make use of this facility, it
# is better to have this file empty, otherwise OpenStack-CI
# will fall back to installing its default packages which
# will potentially be detrimental to the tests executed.

# The gcc compiler
gcc

# Base requirements for RPM distros
gcc-c++ [platform:rpm]
git [platform:rpm]
libffi-devel [platform:rpm]
openssl-devel [platform:rpm]
podman [platform:rpm]
python3-devel [platform:rpm !platform:rhel-7 !platform:centos-7]
PyYAML [platform:rpm !platform:rhel-8 !platform:centos-8 !platform:fedora]
python3-pyyaml [platform:rpm !platform:rhel-7 !platform:centos-7]
python3-dnf [platform:rpm !platform:rhel-7 !platform:centos-7]

# RH Mechanisms
python-rhsm-certificates [platform:redhat]

# SELinux cent7
libselinux-python3 [platform:rpm !platform:rhel-8 !platform:centos-8]
libsemanage-python3 [platform:redhat !platform:rhel-8 !platform:centos-8]
# SELinux cent8
python3-libselinux [platform:rpm !platform:rhel-7 !platform:centos-7]
python3-libsemanage [platform:redhat !platform:rhel-7 !platform:centos-7]

# Required for compressing collected log files in CI
gzip

# Required to build language docs
gettext
@@ -1,111 +0,0 @@
---
- hosts: all
  pre_tasks:
    - name: Set project path fact
      set_fact:
        tripleo_validations_project_path: "{{ ansible_user_dir }}/{{ zuul.projects['opendev.org/openstack/tripleo-validations'].src_dir }}"

    - name: Ensure output dirs
      file:
        path: "{{ ansible_user_dir }}/zuul-output/logs"
        state: directory

    - name: Ensure pip is available
      include_role:
        name: ensure-pip

    - name: Ensure virtualenv is available
      include_role:
        name: ensure-virtualenv

    - name: Setup bindep
      pip:
        name: "bindep"
        virtualenv: "{{ ansible_user_dir }}/test-python"
        virtualenv_command: "{{ ensure_pip_virtualenv_command }}"
        virtualenv_site_packages: true

    - name: Set containers module to 3.0
      become: true
      shell: |
        dnf module disable container-tools:rhel8 -y
        dnf module enable container-tools:3.0 -y
        dnf clean metadata

    - name: Run bindep
      shell: |-
        . {{ ansible_user_dir }}/test-python/bin/activate
        {{ tripleo_validations_project_path }}/scripts/bindep-install
      become: true
      changed_when: false

    - name: Ensure a recent version of pip is installed in virtualenv
      pip:
        name: "pip>=19.1.1"
        virtualenv: "{{ ansible_user_dir }}/test-python"
        virtualenv_command: "{{ ensure_pip_virtualenv_command }}"

    - name: Setup test-python
      pip:
        requirements: "{{ tripleo_validations_project_path }}/molecule-requirements.txt"
        virtualenv: "{{ ansible_user_dir }}/test-python"
        virtualenv_command: "{{ ensure_pip_virtualenv_command }}"
        virtualenv_site_packages: true

    - name: Set up collections
      command: "{{ ansible_user_dir }}/test-python/bin/ansible-galaxy install -fr {{ tripleo_validations_project_path }}/ansible-collections-requirements.yml"

    - name: Display test-python virtualenv package versions
      shell: |-
        . {{ ansible_user_dir }}/test-python/bin/activate
        pip freeze

    - name: Basic ci setup
      become: true
      block:
        - name: Ensure ci directories
          file:
            path: "/etc/ci"
            state: "directory"

        - name: Ensure ci mirror file
          file:
            path: "/etc/ci/mirror_info.sh"
            state: "touch"

        - name: Set an appropriate fs.file-max
          sysctl:
            name: fs.file-max
            value: 2048000
            sysctl_set: true
            state: present
            reload: true

        - name: Set container_manage_cgroup boolean
          seboolean:
            name: container_manage_cgroup
            state: true
            persistent: true
          failed_when: false

        - name: Create limits file for containers
          copy:
            content: |
              * soft nofile 102400
              * hard nofile 204800
              * soft nproc 2048
              * hard nproc 4096
            dest: /etc/security/limits.d/containers.conf

        - name: Reset ssh connection
          meta: reset_connection
  tasks:
    - name: Get necessary git repos
      git:
        repo: https://opendev.org/openstack/{{ item }}
        dest: "{{ tripleo_validations_project_path }}/roles/roles.galaxy/{{ item }}"
        version: master
        force: true
      with_items:
        - tripleo-ansible
        - validations-common
@@ -1,14 +0,0 @@
---
- hosts: all
  tasks:
    - name: set basic zuul fact
      set_fact:
        zuul:
          projects:
            "opendev.org/openstack/tripleo-validations":
              src_dir: "{{ tripleo_src }}"
        ansible_connection: ssh

- import_playbook: pre.yml

- import_playbook: run.yml
@@ -1,42 +0,0 @@
---

- hosts: all
  environment:
    ANSIBLE_LOG_PATH: "{{ ansible_user_dir }}/zuul-output/logs/ansible-execution.log"
  pre_tasks:

    - name: Set project path fact
      set_fact:
        tripleo_validations_project_path: "{{ ansible_user_dir }}/{{ zuul.projects['opendev.org/openstack/tripleo-validations'].src_dir }}"

    - name: Set roles path fact
      set_fact:
        tripleo_validations_roles_paths:
          - "{{ tripleo_validations_project_path }}/roles/roles.galaxy/tripleo-ansible/tripleo_ansible/roles"
          - "{{ tripleo_validations_project_path }}/roles/roles.galaxy/validations-common/validations_common/roles"
          - "{{ tripleo_validations_project_path }}/roles"
          - "/usr/share/ansible/roles"

    - name: Set library path fact
      set_fact:
        tripleo_validations_library_paths:
          - "{{ tripleo_validations_project_path }}/roles/roles.galaxy/validations-common/validations_common/library"
          - "{{ tripleo_validations_project_path }}/library"
          - "/usr/share/ansible/library"

  tasks:
    - name: Run role test job
      shell: |-
        . {{ ansible_user_dir }}/test-python/bin/activate
        . {{ tripleo_validations_project_path }}/ansible-test-env.rc
        pytest --color=yes \
               --html={{ ansible_user_dir }}/zuul-output/logs/reports.html \
               --self-contained-html \
               --ansible-args='{{ tripleo_job_ansible_args | default("") }}' \
               {{ tripleo_validations_project_path }}/tests/test_molecule.py
      args:
        chdir: "{{ tripleo_validations_project_path }}/roles/{{ tripleo_validations_role_name }}"
        executable: /bin/bash
      environment:
        ANSIBLE_ROLES_PATH: "{{ tripleo_validations_roles_paths | join(':') }}"
        ANSIBLE_LIBRARY: "{{ tripleo_validations_library_paths | join(':') }}"
@@ -1,7 +0,0 @@
# this is required for the docs build jobs
sphinx>=2.0.0,!=2.1.0 # BSD
openstackdocstheme>=2.2.2 # Apache-2.0
reno>=3.1.0 # Apache-2.0
doc8>=0.8.0 # Apache-2.0
bashate>=0.6.0 # Apache-2.0
ruamel.yaml>=0.15.5 # MIT
@@ -1,402 +0,0 @@
# Copyright 2019 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import imp
import os

from docutils import core
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.parsers import rst
from docutils.writers.html4css1 import Writer

from sphinx import addnodes

import yaml
from ruamel.yaml import YAML as RYAML

try:
    import io
    StringIO = io.StringIO
except ImportError:
    import StringIO


class DocYaml(RYAML):
    def _license_filter(self, data):
        """This will filter out our boilerplate license heading in return data.

        The filter is used to allow documentation we're creating in variable
        files to be rendered more beautifully.
        """
        lines = list()
        mark = True
        for line in data.splitlines():
            if '# Copyright' in line:
                mark = False
            if mark:
                lines.append(line)
            if '# under the License' in line:
                mark = True
        return '\n'.join(lines)

    def dump(self, data, stream=None, **kw):
        if not stream:
            stream = StringIO()
        try:
            RYAML.dump(self, data, stream, **kw)
            return self._license_filter(stream.getvalue().strip())
        finally:
            stream.close()


DOCYAML = DocYaml()
DOCYAML.default_flow_style = False


class AnsibleAutoPluginDirective(Directive):
    directive_name = "ansibleautoplugin"
    has_content = True
    option_spec = {
        'module': rst.directives.unchanged,
        'role': rst.directives.unchanged,
        'documentation': rst.directives.unchanged,
        'examples': rst.directives.unchanged
    }

    @staticmethod
    def _render_html(source):
        return core.publish_parts(
            source=source,
            writer=Writer(),
            writer_name='html',
            settings_overrides={'no_system_messages': True}
        )

    def make_node(self, title, contents, content_type=None):
        section = self._section_block(title=title)
        if not content_type:
            # Doc section
            for content in contents['docs']:
                for paragraph in content.split('\n'):
                    retnode = nodes.paragraph()
                    retnode.append(self._raw_html_block(data=paragraph))
                    section.append(retnode)

            # Options Section
            options_list = nodes.field_list()
            options_section = self._section_block(title='Options')
            for key, value in contents['options'].items():
                options_list.append(
                    self._raw_fields(
                        data=value['description'],
                        field_name=key
                    )
                )
            else:
                options_section.append(options_list)
                section.append(options_section)

            # Authors Section
            authors_list = nodes.field_list()
            authors_list.append(
                self._raw_fields(
                    data=contents['author']
                )
            )
            authors_section = self._section_block(title='Authors')
            authors_section.append(authors_list)
            section.append(authors_section)

        elif content_type == 'yaml':
            for content in contents:
                section.append(
                    self._literal_block(
                        data=content,
                        dump_data=False
                    )
                )

        return section

    @staticmethod
    def load_module(filename):
        return imp.load_source('__ansible_module__', filename)

    @staticmethod
    def build_documentation(module):
        docs = DOCYAML.load(module.DOCUMENTATION)
        doc_data = dict()
        doc_data['docs'] = docs['description']
        doc_data['author'] = docs.get('author', list())
        doc_data['options'] = docs.get('options', dict())
        return doc_data

    @staticmethod
    def build_examples(module):
        examples = DOCYAML.load(module.EXAMPLES)
        return_examples = list()
        for example in examples:
            return_examples.append(DOCYAML.dump([example]))
        return return_examples

    def _raw_html_block(self, data):
        html = self._render_html(source=data)
        return nodes.raw('', html['body'], format='html')

    def _raw_fields(self, data, field_name=''):
        body = nodes.field_body()
        if isinstance(data, list):
            for item in data:
                body.append(self._raw_html_block(data=item))
        else:
            body.append(self._raw_html_block(data=data))

        field = nodes.field()
        field.append(nodes.field_name(text=field_name))
        field.append(body)
        return field

    @staticmethod
    def _literal_block(data, language='yaml', dump_data=True):
        if dump_data:
            literal = nodes.literal_block(
                text=DOCYAML.dump(data)
            )
        else:
            literal = nodes.literal_block(text=data)
        literal['language'] = 'yaml'
        return literal

    @staticmethod
    def _section_block(title, text=None):
        section = nodes.section(
            title,
            nodes.title(text=title),
            ids=[nodes.make_id('-'.join(title))],
        )
        if text:
            section_body = nodes.field_body()
            section_body.append(nodes.paragraph(text=text))
            section.append(section_body)

        return section

    def _yaml_section(self, to_yaml_data, section_title, section_text=None):
        yaml_section = self._section_block(
            title=section_title,
            text=section_text
        )
        yaml_section.append(self._literal_block(data=to_yaml_data))
        return yaml_section

    def _run_role(self, role):
        section = self._section_block(
            title="Role Documentation",
            text='Welcome to the "{}" role documentation.'.format(
                os.path.basename(role)
            ),
        )

        molecule_defaults = None
        abspath_role = os.path.dirname(os.path.abspath(role))
        molecule_shared_file = os.path.join(
            os.path.dirname(abspath_role), ".config/molecule/config.yml"
        )

        if os.path.exists(molecule_shared_file):
            with open(molecule_shared_file) as msf:
                molecule_defaults = DOCYAML.load(msf.read())

        defaults_file = os.path.join(role, "defaults", "main.yml")
        if os.path.exists(defaults_file):
            with open(defaults_file) as f:
                role_defaults = DOCYAML.load(f.read())
            section.append(
                self._yaml_section(
                    to_yaml_data=role_defaults,
                    section_title="Role Defaults",
                    section_text="This section highlights all of the defaults"
                                 ' and variables set within the "{}"'
                                 " role.".format(os.path.basename(role)),
                )
            )

        vars_path = os.path.join(role, "vars")
        if os.path.exists(vars_path):
            for v_file in os.listdir(vars_path):
                vars_file = os.path.join(vars_path, v_file)
                with open(vars_file) as f:
                    vars_values = DOCYAML.load(f.read())
                section.append(
                    self._yaml_section(
                        to_yaml_data=vars_values,
                        section_title="Role Variables: {}".format(v_file),
                    )
                )

        test_list = nodes.field_list()
        test_section = self._section_block(
            title="Molecule Scenarios",
            text='Molecule is being used to test the "{}" role. The'
                 " following section highlights the drivers in service"
                 " and provides an example playbook showing how the role"
                 " is leveraged.".format(os.path.basename(role)),
        )

        molecule_path = os.path.join(role, "molecule")
        if os.path.exists(molecule_path):
            for test in os.listdir(molecule_path):
                molecule_section = self._section_block(
                    title="Scenario: {}".format(test)
                )
                molecule_file = os.path.join(molecule_path, test, "molecule.yml")
                if not os.path.exists(molecule_file):
                    continue

                with open(molecule_file) as f:
                    molecule_conf = DOCYAML.load(f.read())

                # If the molecule.yml file from the scenario is empty, we get
                # the information from the molecule shared configuration file.
                if not molecule_conf:
                    molecule_conf = molecule_defaults

                # Now that we use a shared molecule configuration file, the
                # molecule.yml file in the role scenarios could be empty or
                # contains only overriding keys.
                driver_data = molecule_conf.get('driver',
                                                molecule_defaults.get('driver'))

                if driver_data:
                    molecule_section.append(
                        nodes.field_name(text="Driver: {}".format(driver_data["name"]))
                    )

                    options = driver_data.get("options")
                    if options:
                        molecule_section.append(
                            self._yaml_section(
                                to_yaml_data=options, section_title="Molecule Options"
                            )
                        )

                platforms_data = molecule_conf.get('platforms',
                                                   molecule_defaults.get('platforms'))

                if platforms_data:
                    molecule_section.append(
                        self._yaml_section(
                            to_yaml_data=platforms_data,
                            section_title="Molecule Platform(s)",
                        )
                    )

                default_playbook = [molecule_path, test, "converge.yml"]

                provisioner_data = molecule_conf.get('provisioner',
                                                     molecule_defaults.get('provisioner'))

                if provisioner_data:
                    inventory = provisioner_data.get('inventory')
                    if inventory:
                        molecule_section.append(
                            self._yaml_section(
                                to_yaml_data=inventory,
                                section_title="Molecule Inventory",
                            )
                        )

                    try:
                        converge = provisioner_data['playbooks']['converge']
                        default_playbook = default_playbook[:-1] + [converge]
                    except KeyError:
                        pass

                molecule_playbook_path = os.path.join(*default_playbook)

                with open(molecule_playbook_path) as f:
                    molecule_playbook = DOCYAML.load(f.read())
                molecule_section.append(
                    self._yaml_section(
                        to_yaml_data=molecule_playbook,
                        section_title="Example {} playbook".format(test),
                    )
                )
                test_list.append(molecule_section)
            else:
                test_section.append(test_list)
                section.append(test_section)

        self.run_returns.append(section)

        # Document any libraries nested within the role
        library_path = os.path.join(role, "library")
        if os.path.exists(library_path):
            self.options['documentation'] = True
            self.options['examples'] = True
            for lib in os.listdir(library_path):
                if lib.endswith(".py"):
                    self._run_module(
                        module=self.load_module(
                            filename=os.path.join(library_path, lib)
                        ),
                        module_title="Embedded module: {}".format(lib),
                        example_title="Examples for embedded module",
                    )

    def _run_module(self, module, module_title="Module Documentation",
                    example_title="Example Tasks"):
        if self.options.get('documentation'):
            docs = self.build_documentation(module=module)
            self.run_returns.append(
                self.make_node(
                    title=module_title,
                    contents=docs
                )
            )

        if self.options.get('examples'):
            examples = self.build_examples(module=module)
            self.run_returns.append(
                self.make_node(
                    title=example_title,
                    contents=examples,
                    content_type='yaml'
                )
            )

    def run(self):
        self.run_returns = list()

        if self.options.get('module'):
            module = self.load_module(filename=self.options['module'])
            self._run_module(module=module)

        if self.options.get('role'):
            self._run_role(role=self.options['role'])

        return self.run_returns


def setup(app):
    classes = [
        AnsibleAutoPluginDirective,
    ]
    for directive_class in classes:
        app.add_directive(directive_class.directive_name, directive_class)

    return {'version': '0.2'}
@@ -1,212 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from glob import glob
import os
import yaml

DEFAULT_METADATA = {
    'name': 'Unnamed',
    'description': 'No description',
    'groups': [],
}


def get_validation_metadata(validation, key):
    """Returns metadata dictionary"""
    try:
        return validation['vars']['metadata'][key]
    except KeyError:
        return DEFAULT_METADATA.get(key)


def get_include_role(validation):
    """Returns Included Role"""
    try:
        if 'tasks' in validation:
            return validation['tasks'][0]['include_role']['name']
        return validation['roles'][0]
    except KeyError:
        return list()


def get_remaining_metadata(validation):
    try:
        return {k: v for k, v in validation['vars']['metadata'].items()
                if k not in ['name', 'description', 'groups']}
    except KeyError:
        return dict()


def get_validation_parameters(validation):
    """Returns parameters"""
    try:
        return {k: v for k, v in validation['vars'].items()
                if k != 'metadata'}
    except KeyError:
        return dict()


def build_summary(group, validations):
    """Creates validations documentation contents by group"""
    entries = [
        "* :ref:`{}`: {}".format(group + '_' + validation['id'],
                                 validation['name'])
        for validation in validations
    ]
    with open('doc/source/validations-{}.rst'.format(group), 'w') as f:
        f.write("\n".join(entries))
        f.write("\n")


def format_dict(my_dict):
    return ''.join(['\n\n - **{}**: {}'.format(key, value)
                    for key, value in my_dict.items()])


def role_doc_entry(role_name, local_roles):
    """Generates Documentation entry

    If the included role isn't hosted on tripleo-validations, we point to the
    validations-common role documentation. Otherwise, it generates a classical
    local toctree.
    """
    local_role_doc = (".. toctree::\n\n"
                      " roles/role-{}".format(role_name))
    doc_base_url = "https://docs.openstack.org/validations-common/latest/roles"
    external_role = \
        ("- `{role} <{baseurl}/role-{role}.html>`_ "
         "from `openstack/validations-common "
         "<https://opendev.org/openstack/validations-common>`_"
         "".format(role=role_name,
                   baseurl=doc_base_url))

    if role_name not in local_roles:
        return external_role
    return local_role_doc


def build_detail(group, validations, local_roles):
    entries = ['{}\n{}\n'.format(group, len(group) * '=')]
    entries = entries + [
        """.. _{label}:

{title}
{adornment}

{name}.

{desc}

- **hosts**: {hosts}
- **groups**: {groups}
- **parameters**:{parameters}
- **roles**: {roles}

Role documentation

{roledoc}
"""
        .format(label=(group + '_' + validation['id']),
                title=validation['id'],
                adornment=(len(validation['id']) * '-'),
                name=validation['name'],
                desc=validation['description'],
                groups=', '.join(validation['groups']),
                hosts=validation['hosts'],
                parameters=format_dict(validation['parameters']),
                roles=validation['roles'],
                roledoc=role_doc_entry(validation['roles'], local_roles)
                )
        for validation in validations]
    with open('doc/source/validations-{}-details.rst'.format(group), 'w') as f:
        f.write("\n".join(entries))


def build_groups_detail(groups):
    entries = [
        """
**{group}**:

*{desc}*

.. include:: {link}
"""
        .format(group=grp.capitalize(),
                link="validations-{}.rst".format(grp),
                desc=desc[0].get('description', None),
                )
        for grp, desc in sorted(groups.items())]
    with open('doc/source/validations-groups.rst', 'w') as f:
        f.write("\n".join(entries))


def parse_groups_file():
    contents = {}
    groups_file_path = os.path.abspath('groups.yaml')

    if os.path.exists(groups_file_path):
        with open(groups_file_path, "r") as grps:
            contents = yaml.safe_load(grps)

    return contents


def get_groups():
    # Seed it with the known groups from groups.yaml file.
    groups = set()
    contents = parse_groups_file()

    for group_name in contents.keys():
|
||||
groups.add(group_name)
|
||||
|
||||
return groups, contents
|
||||
|
||||
|
||||
def get_local_roles(path):
|
||||
"""Returns a list of local Ansible Roles"""
|
||||
return next(os.walk(path))[1]
|
||||
|
||||
|
||||
def setup(app):
|
||||
group_name, group_info = get_groups()
|
||||
build_groups_detail(group_info)
|
||||
|
||||
local_roles = get_local_roles(os.path.abspath('roles'))
|
||||
|
||||
validations = []
|
||||
for validation_path in sorted(glob('playbooks/*.yaml')):
|
||||
with open(validation_path) as f:
|
||||
loaded_validation = yaml.safe_load(f.read())[0]
|
||||
for group in get_validation_metadata(loaded_validation, 'groups'):
|
||||
group_name.add(group)
|
||||
validations.append({
|
||||
'hosts': loaded_validation['hosts'],
|
||||
'parameters': get_validation_parameters(loaded_validation),
|
||||
'id': os.path.splitext(
|
||||
os.path.basename(validation_path))[0],
|
||||
'name': get_validation_metadata(loaded_validation, 'name'),
|
||||
'groups': get_validation_metadata(loaded_validation, 'groups'),
|
||||
'description': get_validation_metadata(loaded_validation,
|
||||
'description'),
|
||||
'metadata': get_remaining_metadata(loaded_validation),
|
||||
'roles': get_include_role(loaded_validation)
|
||||
})
|
||||
|
||||
for group in group_name:
|
||||
validations_in_group = [validation for validation
|
||||
in validations
|
||||
if group in validation['groups']]
|
||||
build_detail(group, validations_in_group, local_roles)
|
||||
build_summary(group, validations_in_group)
|
@ -1,85 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
from unittest import mock
|
||||
|
||||
# Add the project
|
||||
sys.path.insert(0, os.path.abspath('../..'))
|
||||
# Add the extensions
|
||||
sys.path.insert(0, os.path.join(os.path.abspath('.'), '_exts'))
|
||||
|
||||
# -- General configuration ----------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = [
|
||||
'sphinx.ext.autodoc',
|
||||
'generate_validations_doc',
|
||||
'ansible-autodoc',
|
||||
'openstackdocstheme'
|
||||
]
|
||||
|
||||
# autodoc generation is a bit aggressive and a nuisance when doing heavy
|
||||
# text edit cycles.
|
||||
# execute "export SPHINX_DEBUG=1" in your terminal to disable
|
||||
|
||||
# Mocking imports that could cause build failure
|
||||
autodoc_mock_imports = ['ansible']
|
||||
sys.modules['ansible.module_utils.basic'] = mock.Mock()
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
copyright = '2019, OpenStack Foundation'
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
add_module_names = True
|
||||
|
||||
# -- Options for HTML output --------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. Major themes that come with
|
||||
# Sphinx are currently 'default' and 'sphinxdoc'.
|
||||
# html_theme_path = ["."]
|
||||
# html_theme = '_theme'
|
||||
# html_static_path = ['static']
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'tripleo-validationsdoc'
|
||||
html_theme = 'openstackdocs'
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass
|
||||
# [howto/manual]).
|
||||
latex_documents = [
|
||||
('index',
|
||||
'tripleo-validations.tex',
|
||||
'tripleo-validations Documentation',
|
||||
'OpenStack Foundation', 'manual'),
|
||||
]
|
||||
|
||||
# openstackdocstheme options
|
||||
openstackdocs_repo_name = 'openstack/tripleo-validations'
|
||||
openstackdocs_bug_project = 'tripleo'
|
||||
openstackdocs_bug_tag = 'documentation'
|
@ -1,5 +0,0 @@
|
||||
============================
|
||||
So You Want to Contribute...
|
||||
============================
|
||||
|
||||
.. include:: ../../../CONTRIBUTING.rst
|
@ -1,810 +0,0 @@
|
||||
Developer's Guide
|
||||
=================
|
||||
|
||||
Writing Validations
|
||||
-------------------
|
||||
|
||||
All validations are written in standard Ansible with a few extra
metadata fields that provide information to the validation framework.
|
||||
|
||||
For people not familiar with Ansible, get started with their `excellent
|
||||
documentation <https://docs.ansible.com/ansible/>`_.
|
||||
|
||||
After the generic explanation on writing validations, you will find a couple of concrete
examples.
|
||||
|
||||
Directory Structure
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
All validations consist of an Ansible role located in the ``roles`` directory
|
||||
and a playbook located in the ``playbooks`` directory.
|
||||
|
||||
- the ``playbooks`` one contains all the validations playbooks you can run;
|
||||
- the ``lookup_plugins`` one is for custom Ansible look up plugins available
|
||||
to the validations;
|
||||
- the ``library`` one is for custom Ansible modules available to the
|
||||
validations;
|
||||
- the ``roles`` one contains all the necessary Ansible roles to validate your
|
||||
TripleO deployment;
|
||||
|
||||
Here is what the tree looks like::
|
||||
|
||||
playbooks/
|
||||
├── first_validation.yaml
|
||||
├── second_validation.yaml
|
||||
├── third_validation.yaml
|
||||
└── etc...
|
||||
library/
|
||||
├── another_module.py
|
||||
├── some_module.py
|
||||
└── etc...
|
||||
lookup_plugins/
|
||||
├── one_lookup_plugin.py
|
||||
├── another_lookup_plugin.py
|
||||
└── etc...
|
||||
roles
|
||||
├── first_role
|
||||
├── second_role
|
||||
└── etc...
|
||||
|
||||
|
||||
Sample Validation
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
Each validation is an Ansible playbook located in the ``playbooks`` directory
|
||||
calling its own Ansible role located in the ``roles`` directory. Each playbook
has some metadata. Here is what a minimal validation would look like:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
---
|
||||
- hosts: undercloud
|
||||
vars:
|
||||
metadata:
|
||||
name: Hello World
|
||||
description: This validation prints Hello World!
|
||||
roles:
|
||||
- hello-world
|
||||
|
||||
It should be saved as ``playbooks/hello_world.yaml``.
|
||||
|
||||
As shown here, the validation playbook requires three top-level directives:
|
||||
``hosts``, ``vars -> metadata`` and ``roles``.
|
||||
|
||||
``hosts`` specifies which nodes to run the validation on. Based on the
|
||||
``hosts.sample`` structure, the options can be ``all`` (run on all nodes),
|
||||
``undercloud``, ``allovercloud`` (all overcloud nodes), ``controller`` and
|
||||
``compute``.
|
||||
|
||||
The ``vars`` section serves for storing variables that are going to be
|
||||
available to the Ansible playbook. The validations API uses the ``metadata``
|
||||
section to read each validation's name and description. These values are then
|
||||
reported by the API.
|
||||
|
||||
The validations can be grouped together by specifying a ``groups`` metadata.
|
||||
Groups function similarly to tags, so a validation can be part of many
groups. Here is, for example, how to make a validation part of the
|
||||
`pre-deployment` and `hardware` groups:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
metadata:
|
||||
groups:
|
||||
- pre-deployment
|
||||
- hardware
|
||||
|
||||
The validations can be categorized by technical domain and can belong to one or
multiple categories. The categorization depends on what the validation is
checking on the hosts. For example, if a validation checks some networking-related
configuration and needs to get configuration items from the
undercloud.conf file, you will have to put `networking` and `undercloud-config` in
the ``categories`` metadata key:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
metadata:
|
||||
groups:
|
||||
- pre-deployment
|
||||
- hardware
|
||||
categories:
|
||||
- networking
|
||||
- undercloud-config
|
||||
|
||||
.. note::
|
||||
|
||||
Unlike the ``groups``, which must come from the list defined in the
``groups.yaml`` file, the ``categories`` are free-form; they could be, for example:
|
||||
|
||||
* ``networking``
|
||||
* ``compute``
|
||||
* ``baremetal``
|
||||
* ``provisioning``
|
||||
* ``database``
|
||||
* ``os``
|
||||
* ``system``
|
||||
* ``packaging``
|
||||
* ``kernel``
|
||||
* ``security``
|
||||
* ``tls-everywhere``
|
||||
* ``dns``
|
||||
* ``dhcp``
|
||||
* ``dnsmasq``
|
||||
* ``webserver``
|
||||
* ``storage``
|
||||
* ``ha``
|
||||
* ``clustering``
|
||||
* ``undercloud-config``
|
||||
* etc ...
|
||||
|
||||
The validations should be linked to a product. Every validation hosted in
``tripleo-validations`` should have at least ``tripleo`` in the ``products``
metadata key:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
metadata:
|
||||
groups:
|
||||
- pre-deployment
|
||||
- hardware
|
||||
categories:
|
||||
- networking
|
||||
- undercloud-config
|
||||
products:
|
||||
- tripleo
|
||||
|
||||
``roles`` includes the Ansible role associated with this validation, which
contains all the tasks to run. Each task is a YAML dictionary that must at
minimum contain a name and a module to use. The module can be any module that ships
|
||||
with Ansible or any of the custom ones in the ``library`` directory.
|
||||
|
||||
The `Ansible documentation on playbooks
|
||||
<https://docs.ansible.com/ansible/playbooks.html>`__ provides more detailed
|
||||
information.
|
||||
|
||||
Ansible Inventory
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
Dynamic inventory
|
||||
+++++++++++++++++
|
||||
|
||||
Tripleo-validations ships with a `dynamic inventory
|
||||
<https://docs.ansible.com/ansible/intro_dynamic_inventory.html>`__, which
|
||||
contacts the various OpenStack services to provide the addresses of the
|
||||
deployed nodes as well as the undercloud.
|
||||
|
||||
Just pass ``-i /usr/bin/tripleo-ansible-inventory`` to ``ansible-playbook``
|
||||
command.
|
||||
|
||||
As the playbooks are located in their own directory and not at the same level as
|
||||
the ``roles``, ``callback_plugins``, ``library`` and ``lookup_plugins``
|
||||
directories, you will have to export some Ansible variables first:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd tripleo-validations/
|
||||
$ export ANSIBLE_CALLBACK_PLUGINS="${PWD}/callback_plugins"
|
||||
$ export ANSIBLE_ROLES_PATH="${PWD}/roles"
|
||||
$ export ANSIBLE_LOOKUP_PLUGINS="${PWD}/lookup_plugins"
|
||||
$ export ANSIBLE_LIBRARY="${PWD}/library"
|
||||
|
||||
$ ansible-playbook -i /usr/bin/tripleo-ansible-inventory playbooks/hello_world.yaml
|
||||
|
||||
Hosts file
|
||||
++++++++++
|
||||
|
||||
When more flexibility than what the current dynamic inventory provides is
|
||||
needed or when running validations against a host that hasn't been deployed via
|
||||
heat (such as the ``prep`` validations), it is possible to write a custom hosts
|
||||
inventory file. It should look something like this:
|
||||
|
||||
.. code-block:: INI
|
||||
|
||||
[undercloud]
|
||||
undercloud.example.com
|
||||
|
||||
[allovercloud:children]
|
||||
controller
|
||||
compute
|
||||
|
||||
[controller]
|
||||
controller.example.com
|
||||
|
||||
[compute]
|
||||
compute-1.example.com
|
||||
compute-2.example.com
|
||||
|
||||
[all:vars]
|
||||
ansible_ssh_user=stack
|
||||
ansible_sudo=true
|
||||
|
||||
It will have a ``[group]`` section for each role (``undercloud``,
|
||||
``controller``, ``compute``) listing all the nodes belonging to that group. It
|
||||
is also possible to create a group from other groups as done with
|
||||
``[allovercloud:children]`` in the above example. If a validation specifies
|
||||
``hosts: overcloud``, it will be run on any node that belongs to the
|
||||
``compute`` or ``controller`` groups. If a node happens to belong to both, the
|
||||
validation will only be run once.
|
||||
|
||||
Lastly, there is an ``[all:vars]`` section where to configure certain
|
||||
Ansible-specific options.
|
||||
|
||||
``ansible_ssh_user`` will specify the user Ansible should SSH as. If that user
|
||||
does not have root privileges, it is possible to instruct it to use ``sudo`` by
|
||||
setting ``ansible_sudo`` to ``true``.
|
||||
|
||||
Learn more at the `Ansible documentation page for the Inventory
|
||||
<https://docs.ansible.com/ansible/intro_inventory.html>`__
|
||||
|
||||
Custom Modules
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
In case the `available Ansible modules
|
||||
<https://docs.ansible.com/ansible/modules_by_category.html>`__ don't cover your
|
||||
needs, it is possible to write your own. Modules belong to the
|
||||
``library`` directory.
|
||||
|
||||
Here is a sample module that will always fail:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
#!/usr/bin/env python
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
if __name__ == '__main__':
|
||||
module = AnsibleModule(argument_spec={})
|
||||
module.fail_json(msg="This module always fails.")
|
||||
|
||||
Save it as ``library/my_module.py`` and use it in a validation like
|
||||
so:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
tasks:
|
||||
... # some tasks
|
||||
- name: Running my custom module
|
||||
my_module:
|
||||
... # some other tasks
|
||||
|
||||
The name of the module in the validation ``my_module`` must match the file name
|
||||
(without extension): ``my_module.py``.
|
||||
|
||||
The custom modules can accept parameters and do more complex reporting. Please
|
||||
refer to the guide on writing modules in the Ansible documentation.
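To illustrate how a parameterized module is consumed from a task, here is a
minimal, hypothetical sketch; the module name ``my_module``, its ``max_value``
parameter and the registered variable are illustrative assumptions, not
modules shipped in this repository:

.. code-block:: yaml

   tasks:
     - name: Call a hypothetical parameterized custom module
       my_module:
         max_value: 42            # parameter handled by the module's argument_spec
       register: my_module_result

     - name: Inspect the structured report returned by the module
       debug:
         var: my_module_result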
|
||||
|
||||
.. Warning::
|
||||
|
||||
Each custom module must be accompanied by the most complete unit tests
|
||||
possible.
|
||||
|
||||
Learn more at the `Ansible documentation page about writing custom modules
|
||||
<https://docs.ansible.com/ansible/developing_modules.html>`__.
|
||||
|
||||
Running a validation
|
||||
--------------------
|
||||
|
||||
Running the validations requires Ansible and a set of nodes to run them against.
These nodes need to be reachable from the operator's machine and need to have
an account the operator can SSH to and use for passwordless sudo.
|
||||
|
||||
The nodes need to be present in the static inventory file or available from the
|
||||
dynamic inventory script depending on which one the operator chooses to use.
|
||||
Check which nodes are available with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ source stackrc
|
||||
$ tripleo-ansible-inventory --list
|
||||
|
||||
In general, Ansible and the validations will be located on the *undercloud*,
because it should have connectivity to all the *overcloud* nodes and is already set
up to SSH to them.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ source ~/stackrc
|
||||
$ tripleo-validation.py
|
||||
usage: tripleo-validation.py [-h] [--inventory INVENTORY]
|
||||
[--extra-vars EXTRA_VARS [EXTRA_VARS ...]]
|
||||
[--validation <validation_id>[,<validation_id>,...]]
|
||||
[--group <group>[,<group>,...]] [--quiet]
|
||||
[--validation-dir VALIDATION_DIR]
|
||||
[--ansible-base-dir ANSIBLE_BASE_DIR]
|
||||
[--output-log OUTPUT_LOG]
|
||||
{run,list,show}
|
||||
|
||||
$ tripleo-validation.py run --validation <validation_name>
|
||||
|
||||
|
||||
Example: Verify Undercloud RAM requirements
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The Undercloud has a requirement of 16 GB of RAM. Let's write a validation
|
||||
that verifies this is indeed the case before deploying anything.
|
||||
|
||||
Let's create ``playbooks/undercloud-ram.yaml`` and put some metadata
|
||||
in there:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
---
|
||||
- hosts: undercloud
|
||||
vars:
|
||||
metadata:
|
||||
name: Minimum RAM required on the undercloud
|
||||
description: >
|
||||
Make sure the undercloud has enough RAM.
|
||||
groups:
|
||||
- prep
|
||||
- pre-introspection
|
||||
categories:
|
||||
- os
|
||||
- system
|
||||
- ram
|
||||
products:
|
||||
- tripleo
|
||||
|
||||
|
||||
The ``hosts`` key tells which servers the validation should run on. The
|
||||
common values are ``undercloud``, ``overcloud`` (i.e. all overcloud nodes),
|
||||
``controller`` and ``compute`` (i.e. just the controller or the compute nodes).
|
||||
|
||||
The ``name`` and ``description`` metadata will show up in the API and the
|
||||
TripleO UI so make sure to put something meaningful there. The ``groups``
|
||||
metadata applies a tag to the validation and allows grouping validations together in
order to perform group operations, such as running them all in one call.
|
||||
|
||||
Now let's include the Ansible role associated with this validation. Add this at
the same indentation level as ``hosts`` and ``vars``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
roles:
|
||||
- undercloud-ram
|
||||
|
||||
Now let's create the ``undercloud-ram`` Ansible role which will contain the
|
||||
necessary task(s) for checking if the Undercloud has the minimum amount of RAM
|
||||
required:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd tripleo-validations
|
||||
$ ansible-galaxy init --init-path=roles/ undercloud-ram
|
||||
- undercloud-ram was created successfully
|
||||
|
||||
The tree of the newly created role should look like::
|
||||
|
||||
undercloud-ram/
|
||||
├── defaults
|
||||
│ └── main.yml
|
||||
├── meta
|
||||
│ └── main.yml
|
||||
├── tasks
|
||||
│ └── main.yml
|
||||
└── vars
|
||||
└── main.yml
|
||||
|
||||
Now let's add an Ansible task to test that it's all set up properly:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cat <<EOF >> roles/undercloud-ram/tasks/main.yml
|
||||
- name: Test Output
|
||||
debug:
|
||||
msg: "Hello World!"
|
||||
EOF
|
||||
|
||||
When running it, it should output something like this:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ /bin/run-validations.sh --validation-name undercloud-ram.yaml --ansible-default-callback
|
||||
|
||||
PLAY [undercloud] *********************************************************
|
||||
|
||||
TASK [Gathering Facts] ****************************************************
|
||||
ok: [undercloud]
|
||||
|
||||
TASK [undercloud-ram : Test Output] ***************************************
|
||||
ok: [undercloud] => {
|
||||
"msg": "Hello World!"
|
||||
}
|
||||
|
||||
PLAY RECAP ****************************************************************
|
||||
undercloud : ok=2 changed=0 unreachable=0 failed=0
|
||||
|
||||
|
||||
If you run into an issue where the validation isn't found, it may be because the
|
||||
run-validations.sh script is searching for it in the path where the packaging
|
||||
installs validations. For development, export an environment variable named
|
||||
VALIDATIONS_BASEDIR with the base path of your git repo:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd /path/to/git/repo
|
||||
$ export VALIDATIONS_BASEDIR=$(pwd)
|
||||
|
||||
Writing the full validation code is quite easy in this case because Ansible has
|
||||
done all the hard work for us already. We can use the ``ansible_memtotal_mb``
|
||||
fact to get the amount of RAM (in megabytes) the tested server currently has.
|
||||
For other useful values, run ``ansible -i /usr/bin/tripleo-ansible-inventory
|
||||
undercloud -m setup``.
|
||||
|
||||
So, let's replace the hello world task with a real one:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
tasks:
|
||||
- name: Verify the RAM requirements
|
||||
fail: msg="The RAM on the undercloud node is {{ ansible_memtotal_mb }} MB, the minimal recommended value is 16 GB."
|
||||
failed_when: "({{ ansible_memtotal_mb }}) < 16000"
|
||||
|
||||
Running this, we see:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
TASK: [Verify the RAM requirements] *******************************************
|
||||
failed: [localhost] => {"failed": true, "failed_when_result": true}
|
||||
msg: The RAM on the undercloud node is 8778 MB, the minimal recommended value is 16 GB.
|
||||
|
||||
|
||||
That is because our Undercloud node really does not have enough RAM; your mileage may
vary.
|
||||
|
||||
Either way, the validation works and reports the lack of RAM properly!
|
||||
|
||||
``failed_when`` is the real hero here: it evaluates an Ansible expression (here,
whether the node has less RAM than the required minimum) and fails the task when
the expression evaluates to true.
|
||||
|
||||
The ``fail`` line right above it lets us print a custom error in case of
|
||||
a failure. If the task succeeds (because we do have enough RAM), nothing will
|
||||
be printed out.
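As a side note, the same condition could also be sketched with Ansible's built-in
``assert`` module, which bundles the check and the error message in a single task;
this is only an alternative illustration, not the form used by this validation:

.. code-block:: yaml

   tasks:
     - name: Verify the RAM requirements with assert
       assert:
         that:
           - ansible_memtotal_mb >= 16000
         fail_msg: >-
           The RAM on the undercloud node is {{ ansible_memtotal_mb }} MB,
           the minimal recommended value is 16 GB.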
|
||||
|
||||
Now, we're almost done, but there are a few things we can do to make this nicer
|
||||
on everybody.
|
||||
|
||||
First, let's hoist the minimum RAM requirement into a variable. That way we'll
have a single place to change it if we need to, and we'll be able to test the
validation better as well!
|
||||
|
||||
So, let's call the variable ``minimum_ram_gb`` and set it to ``16``. Do this in
|
||||
the ``vars`` section:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
vars:
|
||||
metadata:
|
||||
name: ...
|
||||
description: ...
|
||||
groups: ...
|
||||
categories: ...
|
||||
products: ...
|
||||
minimum_ram_gb: 16
|
||||
|
||||
Make sure it's on the same indentation level as ``metadata``.
|
||||
|
||||
Then, update ``failed_when`` like this:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
failed_when: "({{ ansible_memtotal_mb }}) < {{ minimum_ram_gb|int * 1024 }}"
|
||||
|
||||
And ``fail`` like so:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
fail: msg="The RAM on the undercloud node is {{ ansible_memtotal_mb }} MB, the minimal recommended value is {{ minimum_ram_gb|int * 1024 }} MB."
|
||||
|
||||
And re-run it again to be sure it's still working.
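For reference, the resulting ``roles/undercloud-ram/tasks/main.yml`` would then
look roughly like this sketch, assembled from the steps above:

.. code-block:: yaml

   - name: Verify the RAM requirements
     fail:
       msg: >-
         The RAM on the undercloud node is {{ ansible_memtotal_mb }} MB,
         the minimal recommended value is {{ minimum_ram_gb|int * 1024 }} MB.
     failed_when: "({{ ansible_memtotal_mb }}) < {{ minimum_ram_gb|int * 1024 }}"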
|
||||
|
||||
One benefit of using a variable instead of a hardcoded value is that we can now
|
||||
change the value without editing the yaml file!
|
||||
|
||||
Let's do that to test both success and failure cases.
|
||||
|
||||
This should succeed, because it lowers the RAM requirement to 1 GB:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ ansible-playbook -i /usr/bin/tripleo-ansible-inventory playbooks/undercloud-ram.yaml -e minimum_ram_gb=1
|
||||
|
||||
And this should fail by requiring much more RAM than is necessary:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ ansible-playbook -i /usr/bin/tripleo-ansible-inventory playbooks/undercloud-ram.yaml -e minimum_ram_gb=128
|
||||
|
||||
(the actual values may be different in your configuration -- just make sure one
is low enough to pass and the other high enough to fail)
|
||||
|
||||
And that's it! The validation is now finished and you can start using it in
|
||||
earnest.
|
||||
|
||||
Create a new role with automation
|
||||
---------------------------------
|
||||
|
||||
The role addition process is also automated using Ansible. If Ansible is
available on the development workstation, change directory to the root of
the `tripleo-validations` repository and run the following command, which
will perform the basic tasks noted above.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd tripleo-validations/
|
||||
$ export ANSIBLE_ROLES_PATH="${PWD}/roles"
|
||||
$ ansible-playbook -i localhost, role-addition.yml -e validation_init_role_name=${NEWROLENAME}
|
||||
|
||||
The new role will be created in `tripleo-validations/roles/` from a skeleton and one playbook
|
||||
will be added in `tripleo-validations/playbooks/`.
|
||||
|
||||
It will also add a new **job** entry into the `zuul.d/molecule.yaml`.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
- job:
|
||||
files:
|
||||
- ^roles/${NEWROLENAME}/.*
|
||||
- ^tests/prepare-test-host.yml
|
||||
- ^ci/playbooks/pre.yml
|
||||
- ^ci/playbooks/run.yml
|
||||
- ^molecule-requirements.txt
|
||||
name: tripleo-validations-centos-8-molecule-${NEWROLENAME}
|
||||
parent: tripleo-validations-centos-8-base
|
||||
vars:
|
||||
tripleo_validations_role_name: ${NEWROLENAME}
|
||||
|
||||
|
||||
And the **job** name will be added into the check and gate section at the top
|
||||
of the `molecule.yaml` file.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
- project:
|
||||
check:
|
||||
jobs:
|
||||
- tripleo-validations-centos-8-molecule-${NEWROLENAME}
|
||||
gate:
|
||||
jobs:
|
||||
- tripleo-validations-centos-8-molecule-${NEWROLENAME}
|
||||
|
||||
.. note::
|
||||
|
||||
Adding a `Molecule` test is highly recommended but remains **optional**. Some
validations might require a real OpenStack infrastructure in order to run,
which by definition makes the `Molecule` test very complex to
implement.
|
||||
|
||||
If you are in this case when creating a new validation, please
|
||||
add `-e validation_init_molecule=false` to the above `ansible-playbook`
|
||||
command. No molecule directory and no CI Job will be created.
|
||||
|
||||
Finally, it will add a role documentation file at
`doc/source/roles/role-${NEWROLENAME}.rst`. This file will need to contain
a title, a literal include of the defaults YAML and a literal include of
the molecule playbook, or playbooks, used to test the role, which is noted
as an "example" playbook.
|
||||
|
||||
You will now be able to develop your new validation!
|
||||
|
||||
Developing your own molecule test(s)
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The role addition process will create a default Molecule scenario from the
|
||||
skeleton. By using Molecule, you will be able to test it locally and of course
|
||||
it will be executed during the CI checks.
|
||||
|
||||
In your role directory, you will notice a `molecule` folder which contains a
|
||||
single `Scenario` called `default`. Scenarios are the starting point for a lot
|
||||
of powerful functionality that Molecule offers. A scenario is a kind of test
|
||||
suite for your newly created role.
|
||||
|
||||
The Scenario layout
|
||||
+++++++++++++++++++
|
||||
|
||||
Within the `molecule/default` folder, you will find those files:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ ls
|
||||
molecule.yml converge.yml prepare.yml verify.yml
|
||||
|
||||
* ``molecule.yml`` is the central configuration entrypoint for `Molecule`. With this
|
||||
file, you can configure each tool that `Molecule` will employ when testing
|
||||
your role.
|
||||
|
||||
.. note::
|
||||
|
||||
`Tripleo-validations` uses a global configuration file for `Molecule`.
This file is located at the repository level (``tripleo-validations/.config/molecule/config.yml``)
and defines the default values for all the ``molecule.yml`` files. By default,
the role addition process will produce an empty ``molecule.yml`` inheriting
this ``config.yml`` file. Any key defined in the role ``molecule.yml`` file
will override values from the ``config.yml`` file.

But if you want to override the default values set in the ``config.yml``
file, you will have to redefine them completely in your ``molecule.yml``
file: `Molecule` will not merge the two configuration files, which is why a
partial override is not enough.
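For example, a role-level ``molecule.yml`` that only needs a different platform
could redefine the whole ``platforms`` key while still inheriting every other
default from the global file; the image reference below is purely illustrative:

.. code-block:: yaml

   ---
   # Redefines the *whole* platforms key; all other keys keep the global defaults.
   platforms:
     - name: centos
       hostname: centos
       image: quay.io/centos/centos:stream8   # illustrative image reference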
|
||||
|
||||
* ``prepare.yml`` is the playbook file that contains everything you need to
|
||||
include before your test. It could include package installation, file
creation, or whatever you need on the instance created by the driver.
|
||||
|
||||
* ``converge.yml`` is the playbook file that contains the call for your
role. `Molecule` will invoke this playbook with ``ansible-playbook`` and run
it against an instance created by the driver.
|
||||
|
||||
* ``verify.yml`` is the Ansible file used for testing as Ansible is the default
|
||||
``Verifier``. This allows you to write specific tests against the state of the
|
||||
container after your role has finished executing.
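To make the split concrete, here is a minimal sketch of what the two playbooks
could contain for a hypothetical role named ``my_new_role``; the content actually
generated by the skeleton may differ:

.. code-block:: yaml

   # converge.yml -- applies the role to the test instance
   - hosts: all
     roles:
       - my_new_role                # hypothetical role name

.. code-block:: yaml

   # verify.yml -- asserts on the state left behind by the role
   - hosts: all
     tasks:
       - name: Check that the role produced the expected marker file
         stat:
           path: /tmp/my_new_role.done   # hypothetical artifact of the role
         register: marker

       - name: Fail if the marker file is missing
         assert:
           that:
             - marker.stat.exists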
|
||||
|
||||
Inspecting the Global Molecule Configuration file
|
||||
+++++++++++++++++++++++++++++++++++++++++++++++++
|
||||
|
||||
As mentioned above, ``tripleo-validations`` uses a global configuration for
|
||||
Molecule.
|
||||
|
||||
.. literalinclude:: ../../../.config/molecule/config.yml
|
||||
:language: yaml
|
||||
|
||||
* The ``Driver`` provider: ``podman`` is the default. Molecule will use the
|
||||
driver to delegate the task of creating instances.
|
||||
* The ``Platforms`` definitions: Molecule relies on this to know which instances
  to create, how to name them, and to which group each instance
  belongs. ``Tripleo-validations`` uses a ``CentOS 8 Stream`` image.
|
||||
* The ``Provisioner``: Molecule only provides an Ansible provisioner. Ansible
|
||||
manages the life cycle of the instance based on this configuration.
|
||||
* The ``Scenario`` definition: Molecule relies on this configuration to control
|
||||
the scenario sequence order.
|
||||
* The ``Verifier`` framework: Molecule uses Ansible by default to provide a way
|
||||
to write specific stat checking tests (such as deployment smoke tests) on the
|
||||
target instance.
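For readers without the repository checked out, here is an abridged, illustrative
sketch of the kind of content such a global ``config.yml`` holds; the exact values
used by ``tripleo-validations`` may differ:

.. code-block:: yaml

   ---
   driver:
     name: podman                         # instances are created as Podman containers
   platforms:
     - name: centos
       image: quay.io/centos/centos:stream8   # CentOS 8 Stream test instance (illustrative tag)
   provisioner:
     name: ansible                        # Ansible applies converge.yml to the instance
   scenario:
     test_sequence:                       # controls the scenario sequence order
       - prepare
       - converge
       - verify
   verifier:
     name: ansible                        # verify.yml holds the state-checking tasks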
|
||||
|
||||
Local testing of new roles
|
||||
--------------------------
|
||||
|
||||
Local testing of new roles can be done in two ways:
|
||||
|
||||
* Via the script `scripts/run-local-test`,
|
||||
* or manually by following the procedure described below.
|
||||
|
||||
Running molecule tests with the script run-local-test
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This script will set up the local work environment to execute tests mimicking
|
||||
what Zuul does on a *CentOS 8* machine.
|
||||
|
||||
.. warning::
|
||||
|
||||
This script assumes that the executing user has the
ability to escalate privileges, and it will modify the local system.
|
||||
|
||||
To use this script, execute the following command.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd tripleo-validations
|
||||
$ ./scripts/run-local-test ${NEWROLENAME}
|
||||
|
||||
When using the `run-local-test` script, the TRIPLEO_JOB_ANSIBLE_ARGS
|
||||
environment variable can be used to pass arbitrary Ansible arguments.
|
||||
For example, the following shows how to use `--skip-tags` when testing
|
||||
a role with tags.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ export TRIPLEO_JOB_ANSIBLE_ARGS="--skip-tags tag_one,tag_two"
|
||||
$ ./scripts/run-local-test ${ROLENAME}
|
||||
|
||||
|
||||
Running molecule tests manually
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Role based testing with `molecule`_ can be executed directly from within
|
||||
the role directory.
|
||||
|
||||
.. note::
|
||||
|
||||
All tests require `Podman`_ for container based testing. If `Podman`_ is not
|
||||
available on the local workstation it will need to be installed prior to
|
||||
executing most molecule based tests.
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
The script `bindep-install`, in the **scripts** path, is available and will
|
||||
install all system dependencies.
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
Each molecule test is configured to bind-mount a read-only volume in the
container where the tests are running:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
volumes:
|
||||
- /etc/ci/mirror_info.sh:/etc/ci/mirror_info.sh:ro
|
||||
|
||||
It is an OpenStack Zuul requirement for detecting if we are on a CI node. Of
|
||||
course, when running your `molecule`_ test on your workstation, it is going
|
||||
to fail because you don't have the empty `mirror_info.sh` script in the
|
||||
`/etc/ci/` directory. You can work around this by creating it on your
workstation or removing the volume key in the global configuration file for
|
||||
molecule.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ sudo mkdir -p /etc/ci
|
||||
$ sudo touch /etc/ci/mirror_info.sh
|
||||
|
||||
|
||||
|
||||
Before running basic `molecule`_ tests, it is recommended to install all
|
||||
of the python dependencies in a virtual environment.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ sudo dnf install python3 python3-virtualenv
|
||||
$ python3 -m virtualenv --system-site-packages "${HOME}/test-python"
|
||||
$ source "${HOME}/test-python/bin/activate"
|
||||
(test-python) $ python3 -m pip install "pip>=19.1.1" setuptools bindep --upgrade
|
||||
(test-python) $ scripts/./bindep-install
|
||||
(test-python) $ python3 -m pip install -r requirements.txt \
|
||||
-r test-requirements.txt \
|
||||
-r molecule-requirements.txt
|
||||
(test-python) $ ansible-galaxy install -fr ansible-collections-requirements.yml
|
||||
|
||||
|
||||
Now, it is important to install `validations-common` and `tripleo-ansible` as
|
||||
dependencies.
|
||||
|
||||
.. note::
|
||||
|
||||
`validations-common` contains custom Ansible modules needed by
`tripleo-validations` roles. That's the reason why we need to clone it
beforehand.
|
||||
|
||||
Cloning the `tripleo-ansible` project is only necessary in order to run the
|
||||
`molecule` test(s) for the `image_serve` role. Otherwise, you probably won't
|
||||
need it.
|
||||
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ cd tripleo-validations/
|
||||
$ for REPO in validations-common tripleo-ansible; do git clone https://opendev.org/openstack/${REPO} roles/roles.galaxy/${REPO}; done
|
||||
|
||||
|
||||
To run a basic `molecule`_ test, simply source the `ansible-test-env.rc`
|
||||
file from the project root, and then execute the following commands.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
(test-python) $ source ansible-test-env.rc
|
||||
(test-python) $ cd roles/${NEWROLENAME}/
|
||||
(test-python) $ molecule test --all
|
||||
|
||||
|
||||
If a role has more than one scenario, a specific scenario can be
|
||||
specified on the command line. Running specific scenarios will
|
||||
help provide developer feedback faster. To pass in a scenario, use
|
||||
the `--scenario-name` flag with the name of the desired scenario.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
(test-python) $ cd roles/${NEWROLENAME}/
|
||||
(test-python) $ molecule test --scenario-name ${EXTRA_SCENARIO_NAME}
|
||||
|
||||
|
||||
When debugging `molecule`_ tests, it is sometimes useful to use the
`--debug` flag. This flag will provide extra verbose output about
the test being executed and the environment it runs in.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
(test-python) $ molecule --debug test
|
||||
|
||||
.. _molecule: https://github.com/ansible-community/molecule
|
||||
.. _podman: https://podman.io/
|
@ -1,44 +0,0 @@
|
||||
About Group
|
||||
===========
|
||||
|
||||
For now, the validations are grouped by the deployment stage they should be run
|
||||
on. A validation can belong to multiple groups.
|
||||
|
||||
Adding a new group
|
||||
------------------
|
||||
|
||||
To add a new group, you will need to edit the ``groups.yaml`` file located in
|
||||
the root of the TripleO Validations directory:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
...
|
||||
pre-update:
|
||||
- description: >-
|
||||
Validations which try to validate your OpenStack deployment before you
|
||||
update it.
|
||||
...
|
||||
|
||||
And a new entry in the sphinx documentation index ``doc/source/index.rst``:
|
||||
|
||||
.. code-block:: RST
|
||||
|
||||
Existing validations
|
||||
====================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
validations-no-op-details
|
||||
validations-prep-details
|
||||
validations-pre-introspection-details
|
||||
validations-pre-deployment-details
|
||||
validations-post-deployment-details
|
||||
...
|
||||
|
||||
Group list
|
||||
----------
|
||||
|
||||
Here is a list of groups and their associated validations.
|
||||
|
||||
.. include:: validations-groups.rst
|
@ -1,87 +0,0 @@
|
||||
.. tripleo-validations documentation master file, created by
|
||||
sphinx-quickstart on Tue Jul 9 22:26:36 2013.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
===============================================
|
||||
Welcome to tripleo-validations's documentation!
|
||||
===============================================
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
.. include:: ../../README.rst
|
||||
|
||||
Installation Guide
|
||||
==================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
installation
|
||||
|
||||
Contributing
|
||||
============
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
contributing/contributing
|
||||
contributing/developer_guide
|
||||
|
||||
Existing Groups
|
||||
===============
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
groups
|
||||
|
||||
Existing Validations
|
||||
====================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
validations-no-op-details
|
||||
validations-prep-details
|
||||
validations-pre-introspection-details
|
||||
validations-pre-deployment-details
|
||||
validations-post-deployment-details
|
||||
validations-openshift-on-openstack-details
|
||||
validations-pre-upgrade-details
|
||||
validations-post-upgrade-details
|
||||
validations-pre-system-upgrade-details
|
||||
validations-post-system-upgrade-details
|
||||
validations-pre-undercloud-upgrade-details
|
||||
validations-post-undercloud-upgrade-details
|
||||
validations-pre-overcloud-prepare-details
|
||||
validations-post-overcloud-prepare-details
|
||||
validations-pre-overcloud-upgrade-details
|
||||
validations-post-overcloud-upgrade-details
|
||||
validations-pre-overcloud-converge-details
|
||||
validations-post-overcloud-converge-details
|
||||
validations-pre-ceph-details
|
||||
validations-post-ceph-details
|
||||
validations-pre-update-details
|
||||
validations-pre-update-prepare-details
|
||||
validations-pre-update-run-details
|
||||
validations-pre-update-converge-details
|
||||
validations-post-update-details
|
||||
validations-backup-and-restore-details
|
||||
|
||||
Existing Roles and Modules
|
||||
==========================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
roles
|
||||
modules
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`search`
|
@ -1,13 +0,0 @@
|
||||
Prerequisites
|
||||
=============
|
||||
|
||||
The TripleO validations require Ansible ">=2.8,<2.10.0"::
|
||||
|
||||
$ sudo pip install 'ansible>=2.8,<2.10.0'
|
||||
|
||||
Installation
|
||||
============
|
||||
|
||||
At the command line::
|
||||
|
||||
$ python3 -m pip install tripleo-validations
|
@ -1,9 +0,0 @@
|
||||
Documented modules in TripleO-Validations
|
||||
=========================================
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:glob:
|
||||
|
||||
modules/*
|
@ -1,14 +0,0 @@
|
||||
=================================
|
||||
Module - ceph_pools_pg_protection
|
||||
=================================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* ceph_pools_pg_protection
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/ceph_pools_pg_protection.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
==========================================
|
||||
Module - check_cpus_aligned_with_dpdk_nics
|
||||
==========================================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* check_cpus_aligned_with_dpdk_nics
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/check_cpus_aligned_with_dpdk_nics.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
======================
|
||||
Module - check_flavors
|
||||
======================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* check_flavors
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/check_flavors.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
=================================
|
||||
Module - check_ironic_boot_config
|
||||
=================================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* check_ironic_boot_config
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/check_ironic_boot_config.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
========================================
|
||||
Module - check_other_processes_pmd_usage
|
||||
========================================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* check_other_processes_pmd_usage
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/check_other_processes_pmd_usage.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
======================================
|
||||
Module - convert_range_to_numbers_list
|
||||
======================================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* convert_range_to_numbers_list
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/convert_range_to_numbers_list.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
================================
|
||||
Module - get_dpdk_nics_numa_info
|
||||
================================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* get_dpdk_nics_numa_info
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/get_dpdk_nics_numa_info.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
==================
|
||||
Module - icmp_ping
|
||||
==================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* icmp_ping
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/icmp_ping.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
=================
|
||||
Module - ip_range
|
||||
=================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* ip_range
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/ip_range.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
============================
|
||||
Module - network_environment
|
||||
============================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* network_environment
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/network_environment.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
================================
|
||||
Module - ovs_dpdk_pmd_cpus_check
|
||||
================================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* ovs_dpdk_pmd_cpus_check
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/ovs_dpdk_pmd_cpus_check.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
==================
|
||||
Module - pacemaker
|
||||
==================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* pacemaker
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/pacemaker.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
===================================
|
||||
Module - pmd_threads_siblings_check
|
||||
===================================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* pmd_threads_siblings_check
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/pmd_threads_siblings_check.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
=====================
|
||||
Module - switch_vlans
|
||||
=====================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* switch_vlans
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/switch_vlans.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
=============================
|
||||
Module - tripleo_haproxy_conf
|
||||
=============================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* tripleo_haproxy_conf
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/tripleo_haproxy_conf.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,14 +0,0 @@
|
||||
========================
|
||||
Module - verify_profiles
|
||||
========================
|
||||
|
||||
|
||||
This module provides for the following ansible plugin:
|
||||
|
||||
* verify_profiles
|
||||
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:module: library/verify_profiles.py
|
||||
:documentation: true
|
||||
:examples: true
|
@ -1,9 +0,0 @@
|
||||
Documented roles in TripleO-Validations
|
||||
=======================================
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:glob:
|
||||
|
||||
roles/*
|
@ -1,6 +0,0 @@
|
||||
====
|
||||
ceph
|
||||
====
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/ceph
|
@ -1,45 +0,0 @@
|
||||
=========================
|
||||
check_for_dangling_images
|
||||
=========================
|
||||
|
||||
--------------
|
||||
About the role
|
||||
--------------
|
||||
|
||||
Ansible role to check for dangling images
|
||||
|
||||
Requirements
|
||||
============
|
||||
|
||||
This role will be executed pre Update.
|
||||
|
||||
Dependencies
|
||||
============
|
||||
|
||||
No Dependencies
|
||||
|
||||
Example Playbook
|
||||
================
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
- hosts: servers
|
||||
roles:
|
||||
- { role: check_for_dangling_images, check_for_dangling_images_debug: true }
|
||||
|
||||
License
|
||||
=======
|
||||
|
||||
Apache
|
||||
|
||||
Author Information
|
||||
==================
|
||||
|
||||
**Red Hat TripleO DFG:Upgrades**
|
||||
|
||||
----------------
|
||||
Full Description
|
||||
----------------
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/check_for_dangling_images
|
@ -1,6 +0,0 @@
|
||||
====================
|
||||
check_kernel_version
|
||||
====================
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/check_kernel_version
|
@ -1,53 +0,0 @@
|
||||
============
|
||||
policy_file
|
||||
============
|
||||
|
||||
--------------
|
||||
About The Role
|
||||
--------------
|
||||
|
||||
This role will check if there is a file named policy.yaml on the controllers.
The file should be located in Manila's configuration folder in the container.
|
||||
|
||||
Requirements
|
||||
============
|
||||
|
||||
No Requirements.
|
||||
|
||||
Dependencies
|
||||
============
|
||||
|
||||
No dependencies.
|
||||
|
||||
Example Playbook
|
||||
================
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
- hosts: "{{ controller_rolename | default('Controller') }}"
|
||||
vars:
|
||||
metadata:
|
||||
name: Verify that the Manila policy file is present
description: |
This validation checks that the policy file exists in Manila's configuration folder inside the container.
|
||||
groups:
|
||||
- post-deployment
|
||||
categories:
|
||||
- controller
|
||||
products:
|
||||
- tripleo
|
||||
manilas_policy_file: "/var/lib/config-data/puppet-generated/manila/etc/manila/policy.yaml"
|
||||
roles:
|
||||
- check_manila_policy_file
|
||||
|
||||
Author Information
|
||||
==================
|
||||
|
||||
**Red Hat Manila**
|
||||
|
||||
----------------
|
||||
Full Description
|
||||
----------------
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/check_manila_policy_file
|
@ -1,6 +0,0 @@
|
||||
=====================
|
||||
check_network_gateway
|
||||
=====================
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/check_network_gateway
|
@ -1,50 +0,0 @@
|
||||
==================================
|
||||
check_nfv_ovsdpdk_zero_packet_loss
|
||||
==================================
|
||||
|
||||
--------------
|
||||
About the role
|
||||
--------------
|
||||
|
||||
This role validates the NFV OvS DPDK zero packet loss rules on OvS DPDK Compute nodes to find out the issues with NFV OvS Dpdk configuration.
|
||||
|
||||
Requirements
|
||||
============
|
||||
|
||||
- Validates PMD threads configuration.
|
||||
- Validates PMD threads included as part of isolcpus.
|
||||
- Checks any interrupts on Isolated CPU's.
|
||||
- Validates all the data paths are same on the server if ovs user bridge is used.
|
||||
- Validates bandwidth of the PCI slots.
|
||||
- Validates hugepages, CPU pinning, emulatorpin threads and libvirt queue size configuration on NFV instances.
|
||||
|
||||
Dependencies
|
||||
============
|
||||
|
||||
- Expects all the configuration files that are passed.
|
||||
|
||||
Example Playbook
|
||||
================
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
- hosts: servers
|
||||
roles:
|
||||
|
||||
- { role: check_nfv_ovsdpdk_zero_packet_loss }
|
||||
|
||||
License
|
||||
=======
|
||||
Apache
|
||||
|
||||
Author Information
|
||||
==================
|
||||
|
||||
**Red Hat TripleO DFG:NFV Integration**
|
||||
|
||||
----------------
|
||||
Full Description
|
||||
----------------
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/check_nfv_ovsdpdk_zero_packet_loss
|
@ -1,30 +0,0 @@
|
||||
======================
|
||||
check_ntp_reachability
|
||||
======================
|
||||
|
||||
--------------
|
||||
About The Role
|
||||
--------------
|
||||
|
||||
An Ansible role that will check if the time is synchronised with the NTP servers.
|
||||
The role fails if the time is not NTP synchronised and prints the NTP servers which
chrony is trying to synchronise with. Running this role is recommended if the
``Undercloud`` deployment fails on the NTP synchronisation task.
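This page does not ship an example playbook, but a minimal invocation could be
sketched as follows (the host group is an assumption about where the role is
meant to run):

.. code-block:: yaml

   - hosts: undercloud
     roles:
       - check_ntp_reachability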
|
||||
|
||||
Requirements
|
||||
============
|
||||
|
||||
This role runs on ``Undercloud``.
|
||||
|
||||
License
|
||||
=======
|
||||
|
||||
Apache
|
||||
|
||||
Author Information
|
||||
==================
|
||||
|
||||
Red Hat TripleO Validations Team
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/check_ntp_reachability
|
@ -1,6 +0,0 @@
|
||||
============
|
||||
check_reboot
|
||||
============
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/check_reboot
|
@ -1,6 +0,0 @@
|
||||
==================
|
||||
check_rhsm_version
|
||||
==================
|
||||
|
||||
.. ansibleautoplugin::
|
||||
:role: roles/check_rhsm_version
|
@ -1,72 +0,0 @@
|
||||
=================
|
||||
check_uc_hostname
|
||||
=================
|
||||
|
||||
--------------
|
||||
About the role
|
||||
--------------
|
||||
|
||||
Ansible role to check ``DockerInsecureRegistryAddress`` matches the UC hostname.
|
||||
|
||||
This validation mostly targets the FFWD 13 to 16.X upgrade procedure.
|
||||
|
||||
Customers are expected to follow step `9.3. Configuring access to the
|
||||
undercloud registry
|
||||
<https://access.redhat.com/documentation/en-us/red_hat_openstack_platform/16.1/html-single/framework_for_upgrades_13_to_16.1/index#configuring-access-to-the-undercloud-registry-composable-services>`_
|
||||
|
||||
The customer needs to retrieve the control plane host name on the
|
||||
undercloud and add it into the ``DockerInsecureRegistryAddress``.
|
||||
|
||||
It might happen that the user misses this step or doesn't really add
|
||||
the right control plane host name, and then ``podman`` fails to retrieve the
|
||||
containers.
|
||||
|
||||
To summarize, the customer is expected to:
|
||||
|
||||
- Run ``sudo hiera container_image_prepare_node_names`` to get host name(s)
|
||||
- Edit the containers-prepare-parameter.yaml file and extend the ``DockerInsecureRegistryAddress`` parameter with the
host name and IP of the undercloud, as sketched below.
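A sketch of the resulting ``containers-prepare-parameter.yaml`` entry could look
like this; the host name, IP and port below are placeholders to adapt to the
actual environment:

.. code-block:: yaml

   parameter_defaults:
     DockerInsecureRegistryAddress:
       - undercloud.ctlplane.example.com:8787   # control plane host name from hiera (placeholder)
       - 192.168.24.1:8787                      # undercloud IP (placeholder)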
|
||||
|
||||

This validation will:

- Pull ``DockerInsecureRegistryAddress`` (list) from the OpenStack environment
- Run ``sudo hiera container_image_prepare_node_names``
- Verify that the container_image_prepare_node_names returned by ``hiera`` is contained in the ``DockerInsecureRegistryAddress`` list.

Requirements
============

This role is expected to be executed before an Overcloud update.

Dependencies
============

No dependencies.

Example Playbook
================

.. code-block:: yaml

   - hosts: servers
     vars:
       check_uc_hostname_debug: true
     roles:
       - check_uc_hostname

License
=======

Apache

Author Information
==================

**Red Hat TripleO DFG:Upgrades**

----------------
Full Description
----------------

.. ansibleautoplugin::
   :role: roles/check_uc_hostname
@ -1,6 +0,0 @@
=====================
check_undercloud_conf
=====================

.. ansibleautoplugin::
   :role: roles/check_undercloud_conf
@ -1,6 +0,0 @@
===================================
collect_flavors_and_verify_profiles
===================================

.. ansibleautoplugin::
   :role: roles/collect_flavors_and_verify_profiles
@ -1,73 +0,0 @@
===========
compute_tsx
===========

--------------
About The Role
--------------

An Ansible role to verify that the compute nodes have the appropriate TSX flags
before proceeding with an upgrade.

The ``RHEL-8.3`` kernel disabled the **Intel TSX** (Transactional Synchronization
Extensions) feature by default as a preemptive security measure, but this breaks
live migration from ``RHEL-7.9`` (or even ``RHEL-8.1`` or ``RHEL-8.2``) to
``RHEL-8.3``.

Operators are expected to explicitly define the TSX flag in their KernelArgs for
the compute role to prevent live-migration issues during the upgrade process.
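
For illustration only, a sketch of how such a KernelArgs entry might look in an
environment file, assuming the default ``Compute`` role name; the exact TSX value
is environment-specific and not prescribed by this role:

.. code-block:: yaml

   parameter_defaults:
     ComputeParameters:
       # Placeholder value; pick the TSX setting appropriate for your environment.
       KernelArgs: "tsx=off"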

This role is intended to be called by TripleO via the kernel deployment
templates.

It's also possible to call the role as a standalone.

This also impacts upstream CentOS systems.

Requirements
============

This role needs to be run on an ``Undercloud`` with a deployed ``Overcloud``.

Dependencies
============

No dependencies.

Example Playbook
================

Standard playbook:

.. code-block:: yaml

   - hosts: nova_libvirt
     roles:
       - { role: compute_tsx }

Reporting playbook with no failure:

.. code-block:: yaml

   - hosts: nova_libvirt
     vars:
       - compute_tsx_warning: true
     roles:
       - { role: compute_tsx }

License
=======

Apache

Author Information
==================

**Red Hat TripleO DFG:Compute Squad:Deployment**

----------------
Full Description
----------------

.. ansibleautoplugin::
   :role: roles/compute_tsx
@ -1,6 +0,0 @@
================
container_status
================

.. ansibleautoplugin::
   :role: roles/container_status
@ -1,6 +0,0 @@
================
controller_token
================

.. ansibleautoplugin::
   :role: roles/controller_token
@ -1,6 +0,0 @@
==================
controller_ulimits
==================

.. ansibleautoplugin::
   :role: roles/controller_ulimits
@ -1,6 +0,0 @@
=================
ctlplane_ip_range
=================

.. ansibleautoplugin::
   :role: roles/ctlplane_ip_range
@ -1,6 +0,0 @@
==================
default_node_count
==================

.. ansibleautoplugin::
   :role: roles/default_node_count
@ -1,6 +0,0 @@
===================
deprecated_services
===================

.. ansibleautoplugin::
   :role: roles/deprecated_services
@ -1,6 +0,0 @@
================
dhcp_validations
================

.. ansibleautoplugin::
   :role: roles/dhcp_validations
@ -1,49 +0,0 @@
============
fips_enabled
============

--------------
About The Role
--------------

This role will check whether the system has FIPS turned on.
This validation can be enabled or disabled with the
``enforce_fips_validation`` variable: setting it to ``true`` enables
the validation, setting it to ``false`` disables it.

Requirements
============

FIPS must be turned on.

Dependencies
============

No dependencies.

Example Playbook
================

.. code-block:: yaml

   - hosts: localhost
     gather_facts: false
     roles:
       - { role: fips_enabled }
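
A variant that toggles the check through the variable described above; the value
shown is only an example:

.. code-block:: yaml

   - hosts: localhost
     gather_facts: false
     vars:
       enforce_fips_validation: true
     roles:
       - { role: fips_enabled }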

License
=======

Apache

Author Information
==================

**Red Hat TripleO DFG:Security Squad:OG**

----------------
Full Description
----------------

.. ansibleautoplugin::
   :role: roles/fips_enabled
@ -1,6 +0,0 @@
==========
frr_status
==========

.. ansibleautoplugin::
   :role: roles/frr_status
@ -1,6 +0,0 @@
==========================
healthcheck_service_status
==========================

.. ansibleautoplugin::
   :role: roles/healthcheck_service_status
@ -1,6 +0,0 @@
===========
image_serve
===========

.. ansibleautoplugin::
   :role: roles/image_serve
@ -1,6 +0,0 @@
=========================
ironic_boot_configuration
=========================

.. ansibleautoplugin::
   :role: roles/ironic_boot_configuration
@ -1,6 +0,0 @@
======================
mysql_open_files_limit
======================

.. ansibleautoplugin::
   :role: roles/mysql_open_files_limit
@ -1,6 +0,0 @@
===================
network_environment
===================

.. ansibleautoplugin::
   :role: roles/network_environment
@ -1,6 +0,0 @@
====================
neutron_sanity_check
====================

.. ansibleautoplugin::
   :role: roles/neutron_sanity_check
@ -1,6 +0,0 @@
==========
node_disks
==========

.. ansibleautoplugin::
   :role: roles/node_disks
@ -1,13 +0,0 @@
===========
node_health
===========

This role is used by the :ref:`pre-upgrade_node-health` validation to verify the state
of the overcloud compute services and the baremetal nodes they are running on.

As the clients contacted require Keystone authentication, the role requires the
relevant values, such as the Keystone endpoint and username, for correct operation.
Otherwise it will produce an authentication error.

.. ansibleautoplugin::
   :role: roles/node_health
@ -1,48 +0,0 @@
===================
nova_event_callback
===================

--------------
About the role
--------------

An Ansible role to check if the **Nova** ``auth_url`` in **Neutron** is
configured correctly on the **Overcloud Controller(s)**.

Requirements
============

None.

Dependencies
============

None.

Example Playbook
================

.. code-block:: yaml

   - hosts: Controller
     vars:
       neutron_config_file: /path/to/neutron.conf
     roles:
       - nova_event_callback

License
=======

Apache

Author Information
==================

**Red Hat TripleO DFG:Compute Deployment Squad**

----------------
Full Description
----------------

.. ansibleautoplugin::
   :role: roles/nova_event_callback
@ -1,6 +0,0 @@
===========
nova_status
===========

.. ansibleautoplugin::
   :role: roles/nova_status
@ -1,6 +0,0 @@
==========
nova_svirt
==========

.. ansibleautoplugin::
   :role: roles/nova_svirt
@ -1,6 +0,0 @@
======================
openshift_on_openstack
======================

.. ansibleautoplugin::
   :role: roles/openshift_on_openstack
@ -1,6 +0,0 @@
===================
openstack_endpoints
===================

.. ansibleautoplugin::
   :role: roles/openstack_endpoints
@ -1,93 +0,0 @@
=====================
oslo_config_validator
=====================

--------------
About the role
--------------

An Ansible role that loops through all the containers on the selected host, finds the OpenStack service configuration files
and leverages the `oslo-config-validator <https://docs.openstack.org/oslo.config/latest/cli/validator.html>`_ utility to validate the current running configuration.

It's also possible to generate a report that contains all differences between the sample or default values and the current running configuration.

Finally, it will also verify that the current running configuration doesn't contain any known invalid settings that might have been deprecated and removed in previous versions.

Exceptions
==========

Some services like ``cinder`` can have dynamic configuration sections, in ``cinder``'s case for the storage backends. To perform validation on these dynamic sections, we need to generate a yaml-formatted config sample with ``oslo-config-generator`` beforehand, append a new sample configuration for each storage backend, and validate against that newly generated configuration file by passing ``--opt-data`` to the ``oslo-config-validator`` command instead of using ``--namespaces``. Since generating a sample config adds some delay to the validation, this is not the default way of validating; we prefer to validate directly using ``--namespaces``.

NOTE: At the time of writing this role, ``oslo-config-generator`` has a bug [1] when generating yaml config files, most notably with ``cinder``. Since the inclusion of the oslo.config patch can't be guaranteed, the role will inject this patch [2] into the oslo.config code inside the validation container. This code change is ephemeral and lasts only for the duration of the configuration file generation. The reason we inject this patch is that the validation may run on containers that were created before it was merged. This ensures a smooth validation across the board.

[1] https://bugs.launchpad.net/oslo.config/+bug/1928582
[2] https://review.opendev.org/c/openstack/oslo.config/+/790883


Requirements
============

This role needs to be run on an Undercloud with a deployed Overcloud.

Role Variables
==============

- oslo_config_validator_validation: Whether or not to run assertions on the produced outputs. This also means that the role will fail if anything is output post-filtering. If this is enabled together with the reporting, it will most likely trigger a failure unless executed against a default configuration.
- oslo_config_validator_report: Whether or not to compare the configuration files found with the default configuration.
- oslo_config_validator_invalid_settings: When running validation, whether or not to check for invalid settings. This adds to the time it takes to complete validation because of the way the validations_read_ini module works. This won't work without ``oslo_config_validator_validation`` enabled.
- oslo_config_validator_report_path: The folder used when generating the reports.
- oslo_config_validator_global_ignored_messages: List of regular expressions that filter out messages globally, across all namespaces (see the example after this list).
- oslo_config_validator_namespaces_config: Specific namespace configurations. It contains namespace-specific ignored patterns as well as invalid settings configuration.
- oslo_config_validator_service_configs: Mapping of known OpenStack services with their namespace configuration.
- oslo_config_validator_checked_services: List of services being validated.
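
A sketch of a playbook combining some of the variables above; the report path and
the regular expression are illustrative placeholders only:

.. code-block:: yaml

   - hosts: all
     vars:
       - oslo_config_validator_report: true
       - oslo_config_validator_report_path: /var/tmp/oslo_report
       - oslo_config_validator_global_ignored_messages:
           - 'Deprecated option .*'
     roles:
       - { role: oslo_config_validator }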

Dependencies
============

- podman_container
- podman_container_info
- validations_read_ini
- https://review.opendev.org/c/openstack/oslo.config/+/790883


Example Reporting Playbook
==========================

.. code-block:: yaml

   - hosts: all
     vars:
       - oslo_config_validator_report: true
       - oslo_config_validator_validation: false
     roles:
       - { role: oslo_config_validator }

Example playbook to validate only one service
=============================================

.. code-block:: yaml

   - hosts: all
     vars:
       - oslo_config_validator_checked_services:
           - nova
     roles:
       - { role: oslo_config_validator }

License
=======

Apache

Author Information
==================

**Red Hat TripleO DFG:Compute Deployment Squad**

----------------
Full Description
----------------

.. ansibleautoplugin::
   :role: roles/oslo_config_validator
@ -1,47 +0,0 @@
========================
overcloud_service_status
========================

--------------
About The Role
--------------

An Ansible role to verify the ``Overcloud`` service states after a deployment
or an update. It checks the ``/os-services`` API and looks for deprecated
services (``nova-consoleauth``) or any down services.

Requirements
============

This role needs to be run on an ``Undercloud`` with a deployed ``Overcloud``.

Dependencies
============

No dependencies.

Example Playbook
================

.. code-block:: yaml

   - hosts: undercloud
     roles:
       - { role: overcloud_service_status }

License
=======

Apache

Author Information
==================

**Red Hat TripleO DFG:Compute Squad:Deployment**

----------------
Full Description
----------------

.. ansibleautoplugin::
   :role: roles/overcloud_service_status
@ -1,6 +0,0 @@
============
ovs_dpdk_pmd
============

.. ansibleautoplugin::
   :role: roles/ovs_dpdk_pmd
@ -1,6 +0,0 @@
================
pacemaker_status
================

.. ansibleautoplugin::
   :role: roles/pacemaker_status
@ -1,6 +0,0 @@
===============
package_version
===============

.. ansibleautoplugin::
   :role: roles/package_version
@ -1,6 +0,0 @@
===============
rabbitmq_limits
===============

.. ansibleautoplugin::
   :role: roles/rabbitmq_limits
@ -1,47 +0,0 @@
=====
repos
=====

An Ansible role to check the correctness of the currently configured repositories.

Requirements
------------

This role can be used before or after an Undercloud or an Overcloud has been
deployed.

Role Variables
--------------

- None

Dependencies
------------

No dependencies.

Example Playbook
----------------

.. code-block:: yaml

   - hosts: undercloud
     roles:
       - role: repos

   - hosts: overcloud
     roles:
       - role: repos

License
-------

Apache

Author Information
------------------

Red Hat TripleO Validations Team

.. ansibleautoplugin::
   :role: roles/repos
@ -1,6 +0,0 @@
============
stack_health
============

.. ansibleautoplugin::
   :role: roles/stack_health
@ -1,6 +0,0 @@
==============
stonith_exists
==============

.. ansibleautoplugin::
   :role: roles/stonith_exists
@ -1,6 +0,0 @@
============
switch_vlans
============

.. ansibleautoplugin::
   :role: roles/switch_vlans
@ -1,6 +0,0 @@
===============
system_encoding
===============

.. ansibleautoplugin::
   :role: roles/system_encoding
@ -1,6 +0,0 @@
==============
tls_everywhere
==============

.. ansibleautoplugin::
   :role: roles/tls_everywhere