Clean up roles that have moved to opendev/base-jobs

The log processing submission roles are all in opendev/base-jobs now. To
avoid confusion, remove them from this repo.

Change-Id: I3b745931e13361001b893302f21885b92e82bd03
Clark Boylan
2019-08-23 13:38:08 -07:00
parent 5543eb8224
commit 4a95fc4ae9
10 changed files with 0 additions and 418 deletions

View File

@@ -1,6 +0,0 @@
A module to submit a log processing job.
This role is a container for an Ansible module which processes a log
directory and submits jobs to a log processing gearman queue. The
role itself performs no actions, and is intended only to be used by
other roles as a dependency to supply the module.

View File

@@ -1,209 +0,0 @@
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# Copyright (C) 2017 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import re
from ansible.module_utils.six.moves import urllib
from ansible.module_utils.basic import AnsibleModule, get_exception
import gear
class FileMatcher(object):
def __init__(self, name, tags):
self._name = name
self.name = re.compile(name)
self.tags = tags
    def matches(self, s):
        return bool(self.name.search(s))
class File(object):
def __init__(self, name, tags):
self._name = name
self._tags = tags
@property
def name(self):
return self._name
@name.setter
def name(self, value):
raise Exception("Cannot update File() objects they must be hashable")
@property
def tags(self):
return self._tags
@tags.setter
def tags(self, value):
raise Exception("Cannot update File() objects they must be hashable")
def toDict(self):
return dict(name=self.name,
tags=self.tags)
# We need these objects to be hashable so that we can use sets
# below.
def __eq__(self, other):
return self.name == other.name
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.name)
class LogMatcher(object):
def __init__(self, server, port, config, success, log_url, host_vars):
self.client = gear.Client()
self.client.addServer(server, port)
self.hosts = host_vars
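        # Every host in the job shares the same zuul variables, so take
        # them from the first host.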
self.zuul = list(host_vars.values())[0]['zuul']
self.success = success
self.log_url = log_url
self.matchers = []
for f in config['files']:
self.matchers.append(FileMatcher(f['name'], f.get('tags', [])))
def findFiles(self, path):
results = set()
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
fn = os.path.join(dirpath, filename)
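                # Strip the search root (and its path separator) to get
                # the name relative to the log directory.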
partial_name = fn[len(path) + 1:]
for matcher in self.matchers:
if matcher.matches(partial_name):
results.add(File(partial_name, matcher.tags))
break
return results
def submitJobs(self, jobname, files):
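        # Wait up to 90 seconds for a connection to the gearman server.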
self.client.waitForServer(90)
ret = []
for f in files:
output = self.makeOutput(f)
output = json.dumps(output).encode('utf8')
job = gear.TextJob(jobname, output)
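            # Background job: the executor does not wait for the log
            # processor to finish.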
self.client.submitJob(job, background=True)
ret.append(dict(handle=job.handle,
arguments=output))
return ret
def makeOutput(self, file_object):
output = {}
output['retry'] = False
output['event'] = self.makeEvent(file_object)
output['source_url'] = output['event']['fields']['log_url']
return output
def makeEvent(self, file_object):
out_event = {}
out_event["fields"] = self.makeFields(file_object.name)
basename = os.path.basename(file_object.name)
out_event["tags"] = [basename] + file_object.tags
if basename.endswith(".gz"):
            # Backward compat for e-r (elastic-recheck), which relies on
            # tag values without the .gz suffix
out_event["tags"].append(basename[:-3])
return out_event
def makeFields(self, filename):
hosts = [h for h in self.hosts.values() if 'nodepool' in h]
zuul = self.zuul
fields = {}
fields["filename"] = filename
fields["build_name"] = zuul['job']
fields["build_status"] = self.success and 'SUCCESS' or 'FAILURE'
# TODO: this is too simplistic for zuul v3 multinode jobs
node = hosts[0]
fields["build_node"] = node['nodepool']['label']
fields["build_hostids"] = [h['nodepool']['host_id'] for h in hosts
if 'host_id' in h['nodepool']]
# TODO: should be build_executor, or removed completely
fields["build_master"] = zuul['executor']['hostname']
fields["project"] = zuul['project']['name']
        # The voting value is 1 for voting, 0 for non-voting
fields["voting"] = int(zuul['voting'])
# TODO(clarkb) can we do better without duplicated data here?
fields["build_uuid"] = zuul['build']
fields["build_short_uuid"] = fields["build_uuid"][:7]
# TODO: this should be build_pipeline
fields["build_queue"] = zuul['pipeline']
        # TODO: this is not interesting anymore
fields["build_ref"] = zuul['ref']
fields["build_branch"] = zuul.get('branch', 'UNKNOWN')
# TODO: remove
fields["build_zuul_url"] = "N/A"
if 'change' in zuul:
fields["build_change"] = zuul['change']
fields["build_patchset"] = zuul['patchset']
elif 'newrev' in zuul:
fields["build_newrev"] = zuul.get('newrev', 'UNKNOWN')
fields["node_provider"] = node['nodepool']['provider']
log_url = urllib.parse.urljoin(self.log_url, filename)
fields["log_url"] = log_url
if 'executor' in zuul and 'hostname' in zuul['executor']:
fields["zuul_executor"] = zuul['executor']['hostname']
return fields
def main():
module = AnsibleModule(
argument_spec=dict(
gearman_server=dict(type='str'),
gearman_port=dict(type='int', default=4730),
# TODO: add ssl support
host_vars=dict(type='dict'),
path=dict(type='path'),
config=dict(type='dict'),
success=dict(type='bool'),
log_url=dict(type='str'),
job=dict(type='str'),
),
)
p = module.params
results = dict(files=[], jobs=[], invocation={})
try:
l = LogMatcher(p.get('gearman_server'),
p.get('gearman_port'),
p.get('config'),
p.get('success'),
p.get('log_url'),
p.get('host_vars'))
files = l.findFiles(p['path'])
for f in files:
results['files'].append(f.toDict())
for handle in l.submitJobs(p['job'], files):
results['jobs'].append(handle)
module.exit_json(**results)
except Exception:
e = get_exception()
module.fail_json(msg='Unknown error',
details=repr(e),
**results)
if __name__ == '__main__':
main()
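
# For orientation, a minimal sketch of the payload submitJobs() sends to
# the gearman queue for each matched file, following makeOutput() above.
# Illustrative only; every value below is hypothetical.
#
# {
#     "retry": False,
#     "event": {
#         "fields": {
#             "filename": "job-output.txt",
#             "build_name": "example-job",
#             "build_status": "SUCCESS",
#             "log_url": "https://logs.example.org/42/job-output.txt",
#             ...remaining fields built in makeFields()...
#         },
#         "tags": ["job-output.txt", "console"],
#     },
#     "source_url": "https://logs.example.org/42/job-output.txt",
# }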

View File

@@ -1,44 +0,0 @@
Submit a log processing job to the logstash workers.
This role examines all of the files in the log subdirectory of the job
work dir and any matching filenames are submitted to the gearman queue
for the logstash log processor, along with any tags configured for
those filenames.
**Role Variables**
.. zuul:rolevar:: logstash_gearman_server
:default: logstash.openstack.org
The gearman server to use.
.. zuul:rolevar:: logstash_processor_config
:type: dict
The default file configuration for the logstash parser.
This is a dictionary that contains a single entry:
.. zuul:rolevar:: files
:type: list
A list of files to search for in the ``work/logs/`` directory on
the executor. Each file will be compared to the entries in this
list, and if it matches, a processing job will be submitted to
the logstash processing queue, along with the tags for the
      matching entry. Order is important: the first matching entry
      is used. This field is a list of dictionaries, as follows:
.. zuul:rolevar:: name
The name of the file to process. This is treated as an
unanchored regular expression. To match the full path
(underneath ``work/logs``) start and end the string with
``^`` and ``$`` respectively.
.. zuul:rolevar:: tags
:type: list
A list of strings indicating the logstash processing tags
associated with this file. These may be used to indicate the
file format to the parser.
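
As a quick illustration (not part of the role), the matching semantics
correspond to Python's ``re.search``; anchoring with ``^`` and ``$``
restricts a pattern to the exact path under ``work/logs/``:

.. code-block:: python

   import re

   # Unanchored: the pattern may match anywhere in the relative path.
   print(bool(re.search(r'syslog\.txt', 'old/syslog.txt')))    # True

   # Anchored: only the exact path under work/logs/ matches.
   print(bool(re.search(r'^syslog\.txt$', 'old/syslog.txt')))  # False
   print(bool(re.search(r'^syslog\.txt$', 'syslog.txt')))      # True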

View File

@@ -1,88 +0,0 @@
logstash_gearman_server: logstash.openstack.org
# For every file found in the logs directory (and its subdirs), the
# module will attempt to match the filenames below. If there is a
# match, the file is submitted to the logstash processing queue, along
# with the tags for that match. The first match wins, so be sure to
# list more specific names first. The names are unanchored regular
# expressions (so if you need to match only at the root of the
# work/logs/ directory, be sure to anchor them with ^).
logstash_processor_config:
files:
- name: job-output\.txt
tags:
- console
- console.html
- name: grenade\.sh\.txt
tags:
- console
- console.html
- name: devstacklog\.txt(?!.*summary)
tags:
- console
- console.html
- name: apache/keystone\.txt
tags:
- screen
- oslofmt
- name: apache/horizon_error\.txt
tags:
- apacheerror
# TODO(clarkb) Add swift proxy logs here.
- name: syslog\.txt
tags:
- syslog
- name: tempest\.txt
tags:
- screen
- oslofmt
- name: javelin\.txt
tags:
- screen
- oslofmt
# Neutron index log files (files with messages from all test cases)
- name: dsvm-functional-index\.txt
tags:
- oslofmt
- name: dsvm-fullstack-index\.txt
tags:
- oslofmt
- name: screen-s-account\.txt
tags:
- screen
- apachecombined
- name: screen-s-container\.txt
tags:
- screen
- apachecombined
- name: screen-s-object\.txt
tags:
- screen
- apachecombined
# tripleo logs
- name: postci\.txt
tags:
- console
- postci
- name: var/log/extra/logstash\.txt
tags:
- console
- postci
- name: var/log/extra/errors\.txt
tags:
- console
- errors
# wildcard logs
- name: devstack-gate-.*\.txt
tags:
- console
- console.html
# NOTE(mriedem): Logs that are known logstash index OOM killers are
# blacklisted here until fixed.
# screen-kubelet.txt: https://bugs.launchpad.net/kuryr-kubernetes/+bug/1795067
# screen-mistral-engine.txt: https://bugs.launchpad.net/mistral/+bug/1795068
# screen-monasca-persister.txt: https://storyboard.openstack.org/#!/story/2003911
# screen-ovn-northd.txt: https://bugs.launchpad.net/networking-ovn/+bug/1795069
- name: screen-(?!(peakmem_tracker|dstat|karaf|kubelet|mistral-engine|monasca-persister|ovn-northd)).*\.txt
tags:
- screen
- oslofmt
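    # The negative lookahead above means e.g. screen-n-api.txt is
    # still submitted while screen-kubelet.txt is skipped.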

View File

@@ -1,2 +0,0 @@
dependencies:
- role: submit-log-processor-jobs

View File

@@ -1,9 +0,0 @@
- name: Submit logstash processing jobs to log processors
submit_log_processor_jobs:
gearman_server: "{{ logstash_gearman_server }}"
job: "push-log"
config: "{{ logstash_processor_config }}"
success: "{{ zuul_success }}"
host_vars: "{{ hostvars }}"
path: "{{ zuul.executor.log_root }}"
log_url: "{{ (lookup('file', zuul.executor.result_data_file) | from_json).get('zuul').get('log_url') }}"
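  # The log_url lookup above reads the JSON in
  # zuul.executor.result_data_file and extracts zuul.log_url,
  # i.e. result_data['zuul']['log_url'].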

View File

@@ -1,36 +0,0 @@
Submit a log processing job to the subunit workers.
This role examines all of the files in the log subdirectory of the job
work dir and any matching filenames are submitted to the gearman queue
for the subunit log processor.
**Role Variables**
.. zuul:rolevar:: subunit_gearman_server
:default: logstash.openstack.org
The gearman server to use.
.. zuul:rolevar:: subunit_processor_config
:type: dict
The default file configuration for the subunit parser.
This is a dictionary that contains a single entry:
.. zuul:rolevar:: files
:type: list
A list of files to search for in the ``work/logs/`` directory on
the executor. Each file will be compared to the entries in this
      list, and if it matches, a processing job will be submitted
      to the subunit processing queue. Order is important: the
      first matching entry is used. This field is a list of
      dictionaries, as follows:
.. zuul:rolevar:: name
The name of the file to process. This is treated as an
unanchored regular expression. To match the full path
(underneath ``work/logs``) start and end the string with
``^`` and ``$`` respectively.

View File

@@ -1,12 +0,0 @@
subunit_gearman_server: logstash.openstack.org
# For every file found in the logs directory (and its subdirs), the
# module will attempt to match the filenames below. If there is a
# match, the file is submitted to the subunit processing queue, along
# with the tags for that match. The first match wins, so be sure to
# list more specific names first. The names are unanchored regular
# expressions (so if you need to match only at the root of the
# work/logs/ directory, be sure to anchor them with ^).
subunit_processor_config:
files:
- name: testrepository.subunit
- name: karma.subunit

View File

@@ -1,2 +0,0 @@
dependencies:
- role: submit-log-processor-jobs

View File

@@ -1,10 +0,0 @@
- name: Submit subunit processing jobs to log processors
when: zuul.pipeline in ['gate', 'periodic', 'post']
submit_log_processor_jobs:
gearman_server: "{{ subunit_gearman_server }}"
job: "push-subunit"
config: "{{ subunit_processor_config }}"
success: "{{ zuul_success }}"
host_vars: "{{ hostvars }}"
path: "{{ zuul.executor.log_root }}"
log_url: "{{ (lookup('file', zuul.executor.result_data_file) | from_json).get('zuul').get('log_url') }}"