input {
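  # Listens on localhost:9999 for newline-delimited JSON events; the sender is
  # expected to supply the tags and filename fields referenced by the filters
  # below.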
  tcp {
    host => "localhost"
    port => 9999
    codec => json_lines {}
    type => "jenkins"
  }
}

# You can check grok patterns at http://grokdebug.herokuapp.com/
filter {
  if "screen" in [tags] and [message] =~ "^\+ " {
    drop {}
  }
  if "console" in [tags] or "console.html" in [tags] {
    if [message] == "<pre>" or [message] == "</pre>" {
      drop {}
    }
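    # Join any line that does not start with "<ISO8601 timestamp> |" onto the
    # previous event, tracking each host/filename combination as its own stream.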
    multiline {
      negate => true
      pattern => "^%{TIMESTAMP_ISO8601} \|"
      what => "previous"
      stream_identity => "%{host}.%{filename}"
    }
    grok {
      # Do multiline matching as the above multiline filter may add newlines
      # to the log messages.
      match => { "message" => "(?m)^%{TIMESTAMP_ISO8601:logdate} \| %{GREEDYDATA:logmessage}" }
      add_field => { "received_at" => "%{@timestamp}" }
    }
  } else if "oslofmt" in [tags] {
    multiline {
      negate => true
      pattern => "^(%{TIMESTAMP_ISO8601}|%{SYSLOGTIMESTAMP}) "
      what => "previous"
      stream_identity => "%{host}.%{filename}"
    }
    multiline {
      negate => false
      # NOTE(mriedem): oslo.log 1.2.0 changed the logging_exception_prefix
      # config option from using TRACE to ERROR so we have to handle both.
      #
      # NOTE(sdague): stack traces always include the process id, so having
      # NUMBER as a required element here is important, otherwise ERROR
      # messages just fold into the previous messages, which are typically
      # INFO.
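      #
      # An illustrative line this pattern matches (values made up):
      # 2015-07-14 14:03:46.711 25735 TRACE nova.compute.manager Traceback (most recent call last):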
      pattern => "^(%{TIMESTAMP_ISO8601}|%{SYSLOGTIMESTAMP})%{SPACE}%{NUMBER}%{SPACE}(TRACE|ERROR)"
      what => "previous"
      stream_identity => "%{host}.%{filename}"
    }
    grok {
      # Do multiline matching as the above multiline filter may add newlines
      # to the log messages.
      # TODO move the LOGLEVELs into a proper grok pattern.
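      # Extracts logdate, an optional pid or syslog program/pid, the loglevel,
      # the emitting module and the rest of the line as logmessage.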
      match => { "message" => "(?m)^(%{TIMESTAMP_ISO8601:logdate}|%{SYSLOGTIMESTAMP:logdate})%{SPACE}(%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:|%{NUMBER:pid})?%{SPACE}?(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR) \[?\b%{NOTSPACE:module}\b\]?%{SPACE}?%{GREEDYDATA:logmessage}?" }
      add_field => { "received_at" => "%{@timestamp}" }
    }
  } else if "apachecombined" in [tags] {
    grok {
      match => { "message" => "%{COMBINEDAPACHELOG}" }
      add_field => { "received_at" => "%{@timestamp}" }
      add_field => { "logdate" => "%{timestamp}" }
      add_field => { "logmessage" => "%{verb} %{request} %{response}" }
    }
  } else if "apacheerror" in [tags] {
    grok {
      match => { "message" => "\[(?<logdate>%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}%{SPACE}%{TZ}?)\]%{SPACE}\[%{LOGLEVEL:loglevel}\]%{SPACE}%{GREEDYDATA:logmessage}" }
      add_field => { "received_at" => "%{@timestamp}" }
    }
  } else if "libvirt" in [tags] {
    grok {
      # libvirtd grok filter adapted from
      # https://github.com/OpenStratus/openstack-logstash/blob/master/agent.conf
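      # Matches lines of the form (illustrative):
      # 2014-08-01 13:31:34.123+0000: 4321: error : internal error: process exited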
      match => { "message" => "%{TIMESTAMP_ISO8601:logdate}:%{SPACE}%{NUMBER:pid}:%{SPACE}%{LOGLEVEL:loglevel}%{SPACE}:%{SPACE}%{GREEDYDATA:logmessage}" }
      add_field => { "received_at" => "%{@timestamp}" }
    }
  } else if "syslog" in [tags] {
    grok {
      # Syslog grok filter adapted from
      # http://cookbook.logstash.net/recipes/syslog-pri/syslog.conf
      match => { "message" => "%{SYSLOGTIMESTAMP:logdate}%{SPACE}%{SYSLOGHOST:syslog_host}?%{SPACE}%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:? %{GREEDYDATA:logmessage}" }
      add_field => { "received_at" => "%{@timestamp}" }
    }
  }

  # Filters below here should be consistent for all Jenkins log formats.
  # Remove DEBUG logs to reduce the amount of data that needs to be processed.
  if [loglevel] == "DEBUG" {
    drop {}
  }

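  # Only events that parsed cleanly get their timestamp rewritten: logdate is
  # matched against the formats below and interpreted as UTC.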
if ! ("_grokparsefailure" in [tags]) {
|
|
|
|
date {
|
2014-03-14 16:21:10 -04:00
|
|
|
match => [ "logdate",
|
|
|
|
"yyyy-MM-dd HH:mm:ss.SSS",
|
2015-11-30 11:46:56 -08:00
|
|
|
"yyyy-MM-dd HH:mm:ss.SSSSSS",
|
2014-03-14 16:21:10 -04:00
|
|
|
"yyyy-MM-dd HH:mm:ss,SSS",
|
|
|
|
"yyyy-MM-dd HH:mm:ss",
|
|
|
|
"MMM d HH:mm:ss",
|
|
|
|
"MMM dd HH:mm:ss",
|
|
|
|
"dd/MMM/yyyy:HH:mm:ss Z",
|
|
|
|
"yyyy-MM-dd HH:mm:ss.SSSZ",
|
|
|
|
"E MMM dd HH:mm:ss yyyy Z",
|
2015-11-30 11:46:56 -08:00
|
|
|
"E MMM dd HH:mm:ss yyyy",
|
|
|
|
"ISO8601"
|
2014-03-14 16:21:10 -04:00
|
|
|
]
      timezone => "UTC"
    }
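    # Replace the raw message with just the extracted log text, then drop the
    # intermediate fields.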
    mutate {
      replace => { "message" => "%{logmessage}" }
    }
    mutate {
      remove_field => [ "logdate", "logmessage" ]
    }
  }
}

output {
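  # The host list is rendered from @elasticsearch_nodes when this ERB template
  # is deployed; manage_template is false, so the index template is expected to
  # be managed outside of logstash.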
  elasticsearch {
    hosts => <%= @elasticsearch_nodes.map { |node| node + ":9200" }.inspect %>
    manage_template => false
    flush_size => 1024
  }
}