Add job for validating logscraper and log gearman services

We want to ensure that all deployed services are running
correctly before merging a new change.

Change-Id: I513aa328e720749130fafcd82920585ff99b867c
Daniel Pawlik 2021-11-22 11:07:25 +01:00
parent 02ea6df01d
commit dae8dcc0b1
6 changed files with 202 additions and 1 deletion
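For local verification outside of Zuul, the new playbook can also be run directly against a test host. A minimal sketch (the inventory path here is only an example; in CI the fedora-34 nodeset supplies the node):

    ansible-playbook -i inventory.ini ansible/playbooks/check-services.yml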

.yamllint (new file)

@@ -0,0 +1,10 @@
---
extends: default

rules:
  line-length:
    # matches hardcoded 160 value from ansible-lint
    max: 160

ignore: |
  .zuul.yaml
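With this configuration in place, the linter can be run from the repository root. A minimal invocation, assuming yamllint is available locally (in CI it is exercised through the openstack-tox-linters job listed below):

    yamllint .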

.zuul.yaml

@@ -1,4 +1,13 @@
---
- job:
    name: ci-log-processing-functional-test-fedora-34
    description: Validate that the CI log processing services are deployed and running
    run: ansible/playbooks/check-services.yml
    nodeset:
      nodes:
        - name: fedora-34
          label: fedora-34

- project:
    templates:
      - publish-tox-docs-infra
@@ -8,4 +17,5 @@
        - openstack-tox-linters
        - openstack-tox-pep8
        - openstack-tox-py38
        - ci-log-processing-functional-test-fedora-34
    gate: *logcheck
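The closing `gate: *logcheck` works through a YAML alias: the check pipeline earlier in the file (outside this hunk) is anchored, and the gate pipeline reuses its job list. A minimal sketch of the pattern, assuming the existing `check: &logcheck` definition that the context lines above belong to:

    - project:
        check: &logcheck
          jobs:
            - openstack-tox-py38
            - ci-log-processing-functional-test-fedora-34
        gate: *logcheck  # gate runs the same jobs via the alias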

ansible/playbooks/check-services.yml (new file)

@@ -0,0 +1,24 @@
---
- hosts: all
  become: true
  vars:
    # logscraper
    tenant_builds:
      - tenant: openstack
        gearman_port: 4730
        gearman_server: 0.0.0.0
        zuul_api_url: https://zuul.opendev.org/api/tenant/openstack
        zuul_api_urls:
          - https://zuul.opendev.org/api/tenant/openstack
        insecure: false
        job_names: []
    # loggearman - client
    source_url: https://0.0.0.0
    # loggearman - worker
    output_host: 0.0.0.0
    output_port: 9999
    gearman_host: 0.0.0.0
    gearman_port: 4730
    log_cert_verify: false
  roles:
    - check-services
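The single `tenant: openstack` entry is what the per-tenant logscraper unit name is derived from; it corresponds to the `logscraper-openstack` service asserted by the check-services tasks below (this naming link is inferred from the service list in this change). A quick manual spot-check on the node would be:

    systemctl is-active logscraper-openstack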

check-services role tasks (new file)

@@ -0,0 +1,124 @@
---
- name: Install packages
  package:
    name: podman
    state: present

### OPENSEARCH ###
- name: Setup Opensearch
  shell: >
    podman run -d --name opensearch \
    --network host \
    -e "discovery.type=single-node" \
    quay.rdoproject.org/software-factory/opensearch:1.1.0

- name: Wait for Opensearch to be up
  wait_for:
    host: 0.0.0.0
    port: 9200
    delay: 10
    timeout: 300

- name: Wait for Opensearch to be up
  uri:
    url: "https://0.0.0.0:9200"
    user: "admin"
    password: "admin"
    force_basic_auth: true
    method: GET
    validate_certs: false
    status_code: "200"
  register: result
  until: result.status == 200
  retries: 30
  delay: 10

### LOGSTASH ###
- name: Create required directories for Logstash
  file:
    path: "/etc/logstash/conf.d"
    state: directory
    recurse: true
    owner: '1000'
    group: '1000'

- name: Create Logstash config
  copy:
    content: |
      # Opensearch (performance analyzer) already listens on 9600
      http.port: 9601
      http.host: 0.0.0.0
    dest: /etc/logstash/logstash.yml
    mode: '0644'
    owner: '1000'
    group: '1000'

- name: Setup pipeline configuration
  template:
    src: logstash-index.conf.j2
    dest: /etc/logstash/conf.d/logscraper.conf
    mode: '0644'
    owner: '1000'
    group: '1000'

- name: Setup Logstash service
  shell: >
    podman run -d --name logstash \
    --network host \
    -v /etc/logstash/conf.d:/usr/share/logstash/pipeline:z \
    -v /etc/logstash/logstash.yml:/usr/share/logstash/config/logstash.yml:z \
    quay.rdoproject.org/software-factory/logstash:7.10.1

- name: Wait for Logstash to be up
  wait_for:
    host: 0.0.0.0
    port: 9999
    delay: 10
    timeout: 300

### LOGGEARMAN ###
- name: Setup loggearman service
  include_role:
    name: loggearman

### LOGSCRAPER ###
- name: Setup logscraper service
  include_role:
    name: logscraper

# Flush handlers before running the tests
- name: Force all notified handlers to run now
  meta: flush_handlers

### SERVICE VALIDATION ###
- name: Ensure that all services are available and running
  shell: |
    systemctl is-active -q {{ item }}
  loop:
    - logscraper-openstack
    - loggearman-client
    - loggearman-worker
  register: _service_status
  failed_when: _service_status.rc != 0

- name: Check if log gearman client is listening
  wait_for:
    host: "{{ gearman_host }}"
    port: "{{ gearman_port }}"
    delay: 10
    timeout: 300

- name: Get Opensearch indices
  uri:
    url: "https://localhost:9200/_cat/indices"
    user: "admin"
    password: "admin"
    force_basic_auth: true
    method: GET
    validate_certs: false
    status_code: "200"
    return_content: true
  register: _opensearch_indices
  until: "'logstash-logscraper' in _opensearch_indices.content"
  retries: 30
  delay: 10
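For reference, the final index check can be reproduced by hand on the test node; a rough equivalent of the uri task above, using the same demo admin/admin credentials and skipping certificate verification:

    curl -k -u admin:admin https://localhost:9200/_cat/indices | grep logstash-logscraper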

logstash-index.conf.j2 (new file)

@@ -0,0 +1,33 @@
input {
  tcp {
    host => "0.0.0.0"
    port => "{{ output_port }}"
    codec => json_lines {}
    type => "zuul"
  }
}

filter {
  grok {
    match => ["message", "(?<timestamp>[-0-9]{10}\s+[0-9.:]{12})(?<ms>[0-9]{3}) (?<sep>\|)%{GREEDYDATA:message}"]
    overwrite => [ "message" ]
  }
  if [message] =~ /^\s*$/ {
    drop { }
  }
  date {
    match => ["timestamp", "yyyy-MM-dd HH:mm:ss.SSS"]
    timezone => "UTC"
  }
}

output {
  elasticsearch {
    hosts => ['https://0.0.0.0:9200']
    index => "logstash-logscraper-%{+YYYY.MM.dd}"
    user => 'admin'
    password => "admin"
    ssl => true
    ssl_certificate_verification => false
    ilm_enabled => false
  }
}
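To illustrate the grok pattern above: a hypothetical Zuul console line such as

    2021-11-22 11:07:25.123456 | TASK [Run tests]

is split into timestamp "2021-11-22 11:07:25.123", ms "456", sep "|" and the remaining text, which overwrites the message field; empty messages are dropped, and the millisecond-precision timestamp is then parsed by the date filter as yyyy-MM-dd HH:mm:ss.SSS.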

loggearman role defaults

@@ -24,4 +24,4 @@ output_mode: tcp
crm114_script: ""
crm114_data: ""
log_ca_certs: ""
-log_cert_verify: True
+log_cert_verify: true
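This one-character change presumably keeps the existing default in line with the linter configuration added in this change: both yamllint's default ruleset and ansible-lint prefer the lowercase true/false spellings over True/False.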