
Remove no longer used logstash config

This created confusion when updating configs to handle journald. Remove
the unused files and update docs to point at the proper config location.

Change-Id: Ifd8d8868b124b72a86cf7b5acb30480e72b903ed
Clark Boylan · 2 years ago
commit 90e867b0e3

doc/source/logstash.rst (+4, -4)

@@ -172,23 +172,23 @@ schema.
 
 The config file that tells Logstash how to do this flattening can be
 found at
-:cgit_file:`modules/openstack_project/templates/logstash/indexer.conf.erb`
+https://git.openstack.org/cgit/openstack-infra/logstash-filters/tree/filters/openstack-filters.conf
 
 This works via the tags that are associated with a given message.
 
 The tags in
-:cgit_file:`modules/openstack_project/templates/logstash/indexer.conf.erb`
+https://git.openstack.org/cgit/openstack-infra/logstash-filters/tree/filters/openstack-filters.conf
 are used to tell logstash how to parse a given file's messages, based
 on the file's message format.
 
 When adding a new file to be indexed to
 http://git.openstack.org/cgit/openstack-infra/project-config/tree/roles/submit-logstash-jobs/defaults/main.yaml
-at least one tag from the indexer.conf.erb file should be associated
+at least one tag from the openstack-filters.conf file should be associated
 with the new file.  One can expect to see '{%logmessage%}' instead of
 actual message data if indexing is not working properly.
 
 In the event a new file's format is not covered, a patch for
-:cgit_file:`modules/openstack_project/templates/logstash/indexer.conf.erb`
+https://git.openstack.org/cgit/openstack-infra/logstash-filters/tree/filters/openstack-filters.conf
 should be submitted with an appropriate parsing pattern.
 
 ElasticSearch
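
Note on the documentation change above: the "parsing pattern" it asks contributors to submit is an ordinary tag-keyed Logstash filter stanza, like the ones in the indexer.conf.erb template removed below. A minimal sketch of what such an addition to openstack-filters.conf could look like, assuming a made-up "mysvc" tag and log layout (nothing here is copied from the real openstack-filters.conf), is:

# Hypothetical sketch only: a tag-keyed parsing stanza of the sort the docs
# ask contributors to submit. The "mysvc" tag and its timestamp/level/message
# layout are invented for illustration.
filter {
  if "mysvc" in [tags] {
    grok {
      # Extract a timestamp, a log level, and the remaining message text.
      match => { "message" => "^%{TIMESTAMP_ISO8601:logdate}%{SPACE}%{LOGLEVEL:loglevel}%{SPACE}%{GREEDYDATA:logmessage}" }
      add_field => { "received_at" => "%{@timestamp}" }
    }
  }
}

The same tag would then be listed for the new file in project-config's submit-logstash-jobs defaults, per the paragraph above, so the indexer knows which stanza applies.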

modules/openstack_project/templates/logstash/agent.conf.erb (+0, -47)

@@ -1,47 +0,0 @@
-input {
-  pipe {
-    command => "python3 /usr/local/bin/log-pusher.py -r -z tcp://jenkins.openstack.org:8888 -l http://logs.openstack.org -f console.html"
-    format => "json"
-    message_format => "%{event_message}"
-    tags => ["jenkins", "console"]
-    type => "jenkins_console"
-  }
-}
-
-# You can check grok patterns at http://grokdebug.herokuapp.com/
-filter {
-  grep {
-    type => "jenkins_console"
-    # Drop matches.
-    negate => true
-    match => ["@message", "^</?pre>$"]
-  }
-  multiline {
-    type => "jenkins_console"
-    negate => true
-    pattern => "^%{DATESTAMP} \|"
-    what => "previous"
-  }
-  grok {
-    type => "jenkins_console"
-    pattern => [ "^%{DATESTAMP:logdate} \| %{GREEDYDATA:logmessage}" ]
-    add_field => [ "received_at", "%{@timestamp}" ]
-  }
-  date {
-    type => "jenkins_console"
-    exclude_tags => "_grokparsefailure"
-    match => [ "logdate", "yyyy-MM-dd HH:mm:ss.SSS" ]
-  }
-  mutate {
-    type => "jenkins_console"
-    exclude_tags => "_grokparsefailure"
-    replace => [ "@message", "%{logmessage}" ]
-  }
-  mutate {
-    type => "jenkins_console"
-    exclude_tags => "_grokparsefailure"
-    remove => [ "logdate", "logmessage" ]
-  }
-}
-
-<%= scope.function_template(['openstack_project/logstash/redis-output.conf.erb']) %>

modules/openstack_project/templates/logstash/indexer.conf.erb (+0, -124)

@@ -1,124 +0,0 @@
-input {
-  tcp {
-    host => "localhost"
-    port => 9999
-    codec => json_lines {}
-    type => "jenkins"
-  }
-}
-
-# You can check grok patterns at http://grokdebug.herokuapp.com/
-filter {
-  if "screen" in [tags] and [message] =~ "^\+ " {
-    drop {}
-  }
-  if "console" in [tags] or "console.html" in [tags] {
-    if [message] == "<pre>" or [message] == "</pre>" {
-      drop {}
-    }
-    multiline {
-      negate => true
-      pattern => "^%{TIMESTAMP_ISO8601} \|"
-      what => "previous"
-      stream_identity => "%{host}.%{filename}"
-    }
-    grok {
-      # Do multiline matching as the above mutliline filter may add newlines
-      # to the log messages.
-      match => { "message" => "(?m)^%{TIMESTAMP_ISO8601:logdate} \| %{GREEDYDATA:logmessage}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  } else if "oslofmt" in [tags] {
-    multiline {
-      negate => true
-      pattern => "^(%{TIMESTAMP_ISO8601}|%{SYSLOGTIMESTAMP}) "
-      what => "previous"
-      stream_identity => "%{host}.%{filename}"
-    }
-    multiline {
-      negate => false
-      # NOTE(mriedem): oslo.log 1.2.0 changed the logging_exception_prefix
-      # config option from using TRACE to ERROR so we have to handle both.
-      #
-      # NOTE(sdague): stack traces always include process id, so
-      # NUMBER being required element here is important, otherwise
-      # ERROR messages just fold into the previous messages, which are
-      # typically INFO.
-      pattern => "^(%{TIMESTAMP_ISO8601}|%{SYSLOGTIMESTAMP})%{SPACE}%{NUMBER}%{SPACE}(TRACE|ERROR)"
-      what => "previous"
-      stream_identity => "%{host}.%{filename}"
-    }
-    grok {
-      # Do multiline matching as the above mutliline filter may add newlines
-      # to the log messages.
-      # TODO move the LOGLEVELs into a proper grok pattern.
-      match => { "message" => "(?m)^(%{TIMESTAMP_ISO8601:logdate}|%{SYSLOGTIMESTAMP:logdate})%{SPACE}(%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:|%{NUMBER:pid})?%{SPACE}?(?<loglevel>AUDIT|CRITICAL|DEBUG|INFO|TRACE|WARNING|ERROR) \[?\b%{NOTSPACE:module}\b\]?%{SPACE}?%{GREEDYDATA:logmessage}?" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  } else if "apachecombined" in [tags] {
-    grok {
-      match => { "message" => "%{COMBINEDAPACHELOG}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-      add_field => { "logdate" => "%{timestamp}" }
-      add_field => { "logmessage" => "%{verb} %{request} %{response}" }
-    }
-  } else if "apacheerror" in [tags] {
-    grok {
-      match => { "message" => "\[(?<logdate>%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}%{SPACE}%{TZ}?)\]%{SPACE}\[%{LOGLEVEL:loglevel}\]%{SPACE}%{GREEDYDATA:logmessage}" }
      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  } else if "libvirt" in [tags] {
-    grok {
-      # libvirtd grok filter adapted from
-      # https://github.com/OpenStratus/openstack-logstash/blob/master/agent.conf
-      match => { "message" => "%{TIMESTAMP_ISO8601:logdate}:%{SPACE}%{NUMBER:pid}:%{SPACE}%{LOGLEVEL:loglevel}%{SPACE}:%{SPACE}%{GREEDYDATA:logmessage}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  } else if "syslog" in [tags] {
-    grok {
-      # Syslog grok filter adapted from
-      # http://cookbook.logstash.net/recipes/syslog-pri/syslog.conf
-      match => { "message" => "%{SYSLOGTIMESTAMP:logdate}%{SPACE}%{SYSLOGHOST:syslog_host}?%{SPACE}%{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?:? %{GREEDYDATA:logmessage}" }
-      add_field => { "received_at" => "%{@timestamp}" }
-    }
-  }
-
-  # Filters below here should be consistent for all Jenkins log formats.
-  # Remove DEBUG logs to reduce the amount of data that needs to be processed.
-  if [loglevel] == "DEBUG" {
-    drop {}
-  }
-
-  if ! ("_grokparsefailure" in [tags]) {
-    date {
-      match => [ "logdate",
-                 "yyyy-MM-dd HH:mm:ss.SSS",
-                 "yyyy-MM-dd HH:mm:ss.SSSSSS",
-                 "yyyy-MM-dd HH:mm:ss,SSS",
-                 "yyyy-MM-dd HH:mm:ss",
-                 "MMM  d HH:mm:ss",
-                 "MMM dd HH:mm:ss",
-                 "dd/MMM/yyyy:HH:mm:ss Z",
-                 "yyyy-MM-dd HH:mm:ss.SSSZ",
-                 "E MMM dd HH:mm:ss yyyy Z",
-                 "E MMM dd HH:mm:ss yyyy",
-                 "ISO8601"
-               ]
-      timezone => "UTC"
-    }
-    mutate {
-      replace => { "message" => "%{logmessage}" }
-    }
-    mutate {
-      remove_field => [ "logdate", "logmessage" ]
-    }
-  }
-}
-
-output {
-  elasticsearch {
-    hosts => <%= @elasticsearch_nodes.map { |node| node + ":9200" }.inspect %>
-    manage_template => false
-    flush_size => 1024
-  }
-}
