Merge "Make Heka send logs to Elasticsearch"

Author: Jenkins, 2016-03-05 15:36:44 +00:00 (committed by Gerrit Code Review)
commit 08101fd465
15 changed files with 98 additions and 34 deletions

View File

@@ -130,7 +130,7 @@ rgw_port: "6780"
 mistral_api_port: "8989"
-kibana_port: "5601"
+kibana_server_port: "5601"
 elasticsearch_port: "9200"
@@ -184,7 +184,7 @@ enable_murano: "no"
 enable_ironic: "no"
 enable_magnum: "no"
 enable_mistral: "no"
-enable_elk: "no"
+enable_central_logging: "no"
 enable_mongodb: "no"
 enable_manila: "no"

View File

@@ -46,3 +46,11 @@
     - "swift-object-updater"
     - "swift-proxy-server"
     - "swift-rsyncd"
+
+- name: Copying over heka elasticsearch config file
+  template:
+    src: "heka-{{ item }}.toml.j2"
+    dest: "{{ node_config_directory }}/heka/heka-{{ item }}.toml"
+  with_items:
+    - "elasticsearch"
+  when: "{{ enable_central_logging | bool }}"

View File

@@ -0,0 +1,16 @@
+[elasticsearch_json_encoder]
+type = "ESJsonEncoder"
+index = {{'"%{Type}-%{%Y.%m.%d}"'}}
+es_index_from_timestamp = true
+fields = ["Timestamp", "Type", "Logger", "Severity", "Payload", "Pid", "Hostname", "DynamicFields"]
+
+[elasticsearch_output]
+type = "ElasticSearchOutput"
+server = "{{ internal_protocol }}://{{ kolla_internal_vip_address }}:{{ elasticsearch_port }}"
+message_matcher = "Type == 'log'"
+encoder = "elasticsearch_json_encoder"
+use_buffering = true
+[elasticsearch_output.buffering]
+max_buffer_size = 1073741824 # 1024 * 1024 * 1024
+max_file_size = 134217728 # 128 * 1024 * 1024
+full_action = "drop"
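
A note on what this new output does: ESJsonEncoder builds one Elasticsearch index name per message from the pattern "%{Type}-%{%Y.%m.%d}", and with es_index_from_timestamp = true the date part is taken from the message timestamp. Since the Lua decoders below now set Type = 'log', matched messages end up in daily log-YYYY.MM.DD indices. A minimal Python sketch of that naming scheme (illustration only, not code from this change):

    from datetime import datetime, timezone

    def heka_es_index(msg_type, timestamp):
        # Mirrors "%{Type}-%{%Y.%m.%d}": message Type plus the message's date.
        return "{}-{}".format(msg_type, timestamp.strftime("%Y.%m.%d"))

    # A 'log' message stamped 5 March 2016 lands in index "log-2016.03.05".
    print(heka_es_index("log", datetime(2016, 3, 5, tzinfo=timezone.utc)))

The buffering section caps the on-disk output queue at 1 GiB (in 128 MiB files) and, with full_action = "drop", discards new messages rather than blocking Heka once that limit is reached.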

View File

@@ -3,12 +3,12 @@
 [haproxy_file_output]
 type = "FileOutput"
-message_matcher = "Type == 'Syslog' && Fields[programname] =~ /(?i:haproxy)/"
+message_matcher = "Fields[programname] =~ /(?i:haproxy)/"
 path = "/var/log/kolla/haproxy/haproxy.log"
 encoder = "syslog_encoder"

 [keepalived_file_output]
 type = "FileOutput"
-message_matcher = "Type == 'Syslog' && Fields[programname] =~ /(?i:keepalived)/"
+message_matcher = "Fields[programname] =~ /(?i:keepalived)/"
 path = "/var/log/kolla/haproxy/keepalived.log"
 encoder = "syslog_encoder"

View File

@@ -3,6 +3,6 @@
 [{{ item }}_file_output]
 type = "FileOutput"
-message_matcher = "Type == 'Syslog' && Fields[programname] == '{{ item }}'"
+message_matcher = "Fields[programname] == '{{ item }}'"
 path = "/var/log/kolla/swift/{{ item }}.log"
 encoder = "syslog_encoder"

View File

@@ -2,6 +2,13 @@
 {
     "command": "/usr/bin/hekad -config=/etc/heka/",
     "config_files": [
+        {
+            "source": "{{ container_config_directory }}/heka-elasticsearch.toml",
+            "dest": "/etc/heka/heka-elasticsearch.toml",
+            "owner": "heka",
+            "perm": "0600",
+            "optional": "True"
+        },
         {
             "source": "{{ container_config_directory }}/heka-global.toml",
             "dest": "/etc/heka/heka-global.toml",

View File

@@ -2,8 +2,11 @@
 ####################
 # Elasticsearch
 ####################
-elasticsearch_port: "{{ elasticsearch_port }}"
-elasticsearch_host: "{{ kolla_internal_vip_address }}"
+elasticsearch_cluster_name: "kolla_logging"
+
+####################
+# Docker
+####################
 elasticsearch_image: "{{ docker_registry ~ '/' if docker_registry else '' }}{{ docker_namespace }}/{{ kolla_base_distro }}-{{ kolla_install_type }}-elasticsearch"
 elasticsearch_tag: "{{ openstack_release }}"
 elasticsearch_image_full: "{{ elasticsearch_image }}:{{ elasticsearch_tag }}"

View File

@@ -1,4 +1,17 @@
-network.host: {{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}
+{% set num_nodes = groups['elasticsearch'] | length %}
+{% set minimum_master_nodes = (num_nodes / 2 + 1) | round(0, 'floor') | int if num_nodes > 2 else 1 %}
+{% set recover_after_nodes = (num_nodes * 2 / 3) | round(0, 'floor') | int if num_nodes > 1 else 1 %}
+
+node.name: "{{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}"
+network.host: "{{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}"
+cluster.name: "{{ elasticsearch_cluster_name }}"
+node.master: true
+node.data: true
+discovery.zen.ping.unicast.hosts: [{% for host in groups['elasticsearch'] %}"{{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}"{% if not loop.last %},{% endif %}{% endfor %}]
+discovery.zen.minimum_master_nodes: {{ minimum_master_nodes }}
+gateway.expected_nodes: {{ num_nodes }}
+gateway.recover_after_time: "5m"
+gateway.recover_after_nodes: {{ recover_after_nodes }}
 path.conf: "/etc/elasticsearch"
 path.data: "/var/lib/elasticsearch/data"
 path.logs: "/var/log/elasticsearch"
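
The two {% set %} lines above derive the Zen discovery and gateway quorum settings from the size of the elasticsearch inventory group. A small Python sketch of the same arithmetic, with hypothetical cluster sizes to show the resulting values (not part of the change itself):

    import math

    def quorum_settings(num_nodes):
        # minimum_master_nodes: a majority of the cluster once there are more than 2 nodes
        minimum_master_nodes = math.floor(num_nodes / 2 + 1) if num_nodes > 2 else 1
        # recover_after_nodes: roughly two thirds of the expected node count
        recover_after_nodes = math.floor(num_nodes * 2 / 3) if num_nodes > 1 else 1
        return minimum_master_nodes, recover_after_nodes

    for n in (1, 3, 5):
        print(n, quorum_settings(n))  # 1 -> (1, 1), 3 -> (2, 2), 5 -> (3, 3)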

View File

@@ -376,3 +376,26 @@ listen radosgw_external
 {% endfor %}
 {% endif %}
 {% endif %}
+
+{% if enable_central_logging | bool %}
+listen kibana
+  bind {{ kolla_internal_vip_address }}:{{ kibana_server_port }}
+{% for host in groups['kibana'] %}
+  server {{ hostvars[host]['ansible_hostname'] }} {{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}:{{ kibana_server_port }} check inter 2000 rise 2 fall 5
+{% endfor %}
+
+{% if haproxy_enable_external_vip | bool %}
+listen kibana_external
+  bind {{ kolla_external_vip_address }}:{{ kibana_server_port }}
+{% for host in groups['kibana'] %}
+  server {{ hostvars[host]['ansible_hostname'] }} {{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}:{{ kibana_server_port }} check inter 2000 rise 2 fall 5
+{% endfor %}
+{% endif %}
+
+listen elasticsearch
+  option dontlog-normal
+  bind {{ kolla_internal_vip_address }}:{{ elasticsearch_port }}
+{% for host in groups['elasticsearch'] %}
+  server {{ hostvars[host]['ansible_hostname'] }} {{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}:{{ elasticsearch_port }} check inter 2000 rise 2 fall 5
+{% endfor %}
+{% endif %}

View File

@@ -2,12 +2,10 @@
 ####################
 # Kibana
 ####################
-kibana_port: "{{ kibana_port }}"
-kibana_host: "{{ kolla_internal_vip_address }}"
-kibana_app_id: "discover"
-kibana_request_timeout: 300000
-kibana_shard_timeout: 0
-kibana_verify_ssl: false
+kibana_default_app_id: "discover"
+kibana_elasticsearch_request_timeout: 300000
+kibana_elasticsearch_shard_timeout: 0
+kibana_elasticsearch_ssl_verify: false

 ####################
@@ -16,9 +14,3 @@ kibana_verify_ssl: false
 kibana_image: "{{ docker_registry ~ '/' if docker_registry else '' }}{{ docker_namespace }}/{{ kolla_base_distro }}-{{ kolla_install_type }}-kibana"
 kibana_tag: "{{ openstack_release }}"
 kibana_image_full: "{{ kibana_image }}:{{ kibana_tag }}"
-
-####################
-# Elasticsearch
-####################
-elasticsearch_preserve_host: "true"

View File

@@ -1,11 +1,10 @@
-port: {{ kibana_port }}
-host: {{ kibana_host }}
-elasticsearch_url: "{{ internal_protocol }}://{{ kolla_internal_fqdn }}:{{ elasticsearch_port }}"
-elasticsearch_preserve_host: {{ elasticsearch_preserve_host }}
-default_app_id: {{ kibana_app_id }}
-request_timeout: {{ kibana_request_timeout }}
-shard_timeout: {{ kibana_shard_timeout }}
-verify_ssl: {{ kibana_verify_ssl }}
+kibana.defaultAppId: "{{ kibana_default_app_id }}"
+server.port: {{ kibana_server_port }}
+server.host: "{{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}"
+elasticsearch.url: "{{ internal_protocol }}://{{ hostvars[inventory_hostname]['ansible_' + api_interface]['ipv4']['address'] }}:{{ elasticsearch_port }}"
+elasticsearch.requestTimeout: {{ kibana_elasticsearch_request_timeout }}
+elasticsearch.shardTimeout: {{ kibana_elasticsearch_shard_timeout }}
+elasticsearch.ssl.verify: {{ kibana_elasticsearch_ssl_verify }}
 bundled_plugin_ids:
   - plugins/dashboard/index
   - plugins/discover/index

View File

@@ -30,13 +30,13 @@
   roles:
     - { role: kibana,
        tags: kibana,
-       when: enable_elk | bool }
+       when: enable_central_logging | bool }

 - hosts: elasticsearch
   roles:
     - { role: elasticsearch,
        tags: elasticsearch,
-       when: enable_elk | bool }
+       when: enable_central_logging | bool }

 - hosts: memcached
   roles:

View File

@@ -20,7 +20,7 @@ local utils = require "os_utils"
 local msg = {
     Timestamp = nil,
-    Type = 'Syslog',
+    Type = 'log',
     Hostname = read_config("hostname"),
     Payload = nil,
     Pid = nil,

View File

@@ -20,7 +20,7 @@ local utils = require "os_utils"
 local msg = {
     Timestamp = nil,
-    Type = 'Syslog',
+    Type = 'log',
     Hostname = read_config("hostname"),
     Payload = nil,
     Pid = nil,

View File

@@ -33,7 +33,9 @@ else
         openvswitch_{vswitchd,db} \
         rabbitmq{,_bootstrap} \
         heka \
-        swift_{account_{auditor,reaper,replicator,server},container_{auditor,replicator,server,updater},object_{auditor,expirer,replicator,server,updater},proxy_server,rsyncd}
+        swift_{account_{auditor,reaper,replicator,server},container_{auditor,replicator,server,updater},object_{auditor,expirer,replicator,server,updater},proxy_server,rsyncd} \
+        elasticsearch \
+        kibana
     )
 ceph_osd_bootstrap=$(docker ps -a --filter "name=bootstrap_osd_*" --format "{{.Names}}")
 ceph_osd_containers=$(docker ps -a --filter "name=ceph_osd_*" --format "{{.Names}}")
@@ -51,7 +53,8 @@ else
         mongodb \
         haproxy_socket \
         heka{,_socket} \
-        kolla_logs
+        kolla_logs \
+        elasticsearch
     )
 fi
fi fi