
After all of the discussions we had on "https://review.opendev.org/#/c/670626/2", I studied all projects that have an "oslo_messaging" section. Afterwards, I applied the same method that is already used in the "oslo_messaging" section in Nova, Cinder, and others. This guarantees that we have a consistent method to enable/disable notifications across projects based on components (e.g. Ceilometer) being enabled or disabled. Here follows the list of components, and the respective changes I made. * Aodh: The section is declared, but it is not used. Therefore, it will be removed in an upcoming PR. * Congress: The section is declared, but it is not used. Therefore, it will be removed in an upcoming PR. * Cinder: It was already properly configured. * Octavia: The section is declared, but it is not used. Therefore, it will be removed in an upcoming PR. * Heat: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Ceilometer: Ceilometer publishes some messages to RabbitMQ. However, the default driver is "messagingv2", and not '' (empty) as defined in Oslo; these configurations are defined in ceilometer/publisher/messaging.py. Therefore, we do not need to do anything for the "oslo_messaging_notifications" section in Ceilometer. * Tacker: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Neutron: It was already properly configured. * Nova: It was already properly configured. However, we found another issue with its configuration. Kolla-ansible does not configure nova notifications as it should. If 'searchlight' is not installed (enabled) the 'notification_format' should be 'unversioned'. The default is 'both'; so nova will send a notification to the queue versioned_notifications; but that queue has no consumer when 'searchlight' is disabled. In our case, the queue got 511k messages. 
The huge amount of "stuck" messages made the RabbitMQ cluster unstable. https://bugzilla.redhat.com/show_bug.cgi?id=1478274 https://bugs.launchpad.net/ceilometer/+bug/1665449 * Nova_hyperv: I added the same configurations as in the Nova project. * Vitrage: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Searchlight: I created a mechanism similar to what we have in AODH, Cinder, Nova, and others. * Ironic: I created a mechanism similar to what we have in AODH, Cinder, Nova, and others. * Glance: It was already properly configured. * Trove: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Blazar: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Sahara: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Watcher: I created a mechanism similar to what we have in AODH, Cinder, Nova, and others. * Barbican: I created a mechanism similar to what we have in Cinder, Nova, and others. I also added a configuration to the 'keystone_notifications' section. Barbican needs its own queue to capture events from Keystone. Otherwise, it has an impact on Ceilometer and other systems that are connected to the "notifications" default queue. * Keystone: Keystone is the system that triggered this work with the discussions that followed on https://review.opendev.org/#/c/670626/2. After a long discussion, we agreed to apply the same approach that we have in Nova, Cinder and other systems in Keystone. That is what we did. Moreover, we introduced a new topic "barbican_notifications" when barbican is enabled. We also removed the variable enable_cadf_notifications, as it is obsolete, and the default in Keystone is CADF. * Mistral: It had "noop" hardcoded as the driver. However, that does not seem like a good practice. 
Instead, I applied the same standard of using the driver and pushing to the "notifications" queue if Ceilometer is enabled. * Cyborg: I created a mechanism similar to what we have in AODH, Cinder, Nova, and others. * Murano: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Senlin: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Manila: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Zun: The section is declared, but it is not used. Therefore, it will be removed in an upcoming PR. * Designate: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. * Magnum: It was already using a similar scheme; I just modified it a little bit to be the same as we have in all other components. Closes-Bug: #1838985 Change-Id: I88bdb004814f37c81c9a9c4e5e491fac69f6f202 Signed-off-by: Rafael Weingärtner <rafael@apache.org>
---
project_name: "keystone"

# Service/container definitions for the keystone role. Each entry maps a
# container to its image, deployment group, volumes and (for the API
# container) the HAProxy frontends that load-balance it.
keystone_services:
  keystone:
    container_name: "keystone"
    group: "keystone"
    enabled: true
    image: "{{ keystone_image_full }}"
    volumes: "{{ keystone_default_volumes + keystone_extra_volumes }}"
    dimensions: "{{ keystone_dimensions }}"
    haproxy:
      keystone_internal:
        enabled: "{{ enable_keystone }}"
        mode: "http"
        external: false
        port: "{{ keystone_public_port }}"
        listen_port: "{{ keystone_public_listen_port }}"
      keystone_external:
        enabled: "{{ enable_keystone }}"
        mode: "http"
        external: true
        port: "{{ keystone_public_port }}"
        listen_port: "{{ keystone_public_listen_port }}"
      keystone_admin:
        enabled: "{{ enable_keystone }}"
        mode: "http"
        external: false
        port: "{{ keystone_admin_port }}"
        listen_port: "{{ keystone_admin_listen_port }}"
  # SSH sidecar used to distribute fernet keys between hosts; only needed
  # when the fernet token provider is in use.
  keystone-ssh:
    container_name: "keystone_ssh"
    group: "keystone"
    enabled: "{{ keystone_token_provider == 'fernet' }}"
    image: "{{ keystone_ssh_image_full }}"
    volumes:
      - "{{ node_config_directory }}/keystone-ssh/:{{ container_config_directory }}/:ro"
      - "/etc/localtime:/etc/localtime:ro"
      - "kolla_logs:/var/log/kolla/"
      - "keystone_fernet_tokens:/etc/keystone/fernet-keys"
    dimensions: "{{ keystone_ssh_dimensions }}"
  # Fernet key rotation container; only needed for the fernet token provider.
  keystone-fernet:
    container_name: "keystone_fernet"
    group: "keystone"
    enabled: "{{ keystone_token_provider == 'fernet' }}"
    image: "{{ keystone_fernet_image_full }}"
    volumes:
      - "{{ node_config_directory }}/keystone-fernet/:{{ container_config_directory }}/:ro"
      - "/etc/localtime:/etc/localtime:ro"
      - "kolla_logs:/var/log/kolla/"
      - "keystone_fernet_tokens:/etc/keystone/fernet-keys"
    dimensions: "{{ keystone_fernet_dimensions }}"

####################
# Database
####################
keystone_database_name: "keystone"
# Use the shared MariaDB account only when preconfigured databases with a
# common user are in effect; otherwise keystone gets a dedicated DB user.
keystone_database_user: "{% if use_preconfigured_databases | bool and use_common_mariadb_user | bool %}{{ database_user }}{% else %}keystone{% endif %}"
keystone_database_address: "{{ database_address }}:{{ database_port }}"

####################
# Fernet
####################
# System user/group that owns the fernet key repository inside the container.
keystone_username: "keystone"
keystone_groupname: "keystone"

####################
# Docker
####################
keystone_install_type: "{{ kolla_install_type }}"
keystone_tag: "{{ openstack_release }}"

keystone_image: "{{ docker_registry ~ '/' if docker_registry else '' }}{{ docker_namespace }}/{{ kolla_base_distro }}-{{ keystone_install_type }}-keystone"
keystone_service_tag: "{{ keystone_tag }}"
keystone_image_full: "{{ keystone_image }}:{{ keystone_service_tag }}"

keystone_fernet_image: "{{ docker_registry ~ '/' if docker_registry else '' }}{{ docker_namespace }}/{{ kolla_base_distro }}-{{ keystone_install_type }}-keystone-fernet"
keystone_fernet_tag: "{{ keystone_tag }}"
keystone_fernet_image_full: "{{ keystone_fernet_image }}:{{ keystone_fernet_tag }}"

keystone_ssh_image: "{{ docker_registry ~ '/' if docker_registry else '' }}{{ docker_namespace }}/{{ kolla_base_distro }}-{{ keystone_install_type }}-keystone-ssh"
keystone_ssh_tag: "{{ keystone_tag }}"
keystone_ssh_image_full: "{{ keystone_ssh_image }}:{{ keystone_ssh_tag }}"

keystone_dimensions: "{{ default_container_dimensions }}"
keystone_fernet_dimensions: "{{ default_container_dimensions }}"
keystone_ssh_dimensions: "{{ default_container_dimensions }}"

keystone_default_volumes:
  - "{{ node_config_directory }}/keystone/:{{ container_config_directory }}/:ro"
  - "/etc/localtime:/etc/localtime:ro"
  # Dev mode bind-mounts the keystone source tree over the installed package;
  # renders to '' (skipped) when keystone_dev_mode is false.
  - "{{ kolla_dev_repos_directory ~ '/keystone/keystone:/var/lib/kolla/venv/lib/python2.7/site-packages/keystone' if keystone_dev_mode | bool else '' }}"
  - "kolla_logs:/var/log/kolla/"
  # Fernet key volume is only mounted when the fernet token provider is used;
  # renders to '' (skipped) otherwise.
  - "{% if keystone_token_provider == 'fernet' %}keystone_fernet_tokens:/etc/keystone/fernet-keys{% endif %}"

keystone_extra_volumes: "{{ default_extra_volumes }}"

####################
# OpenStack
####################
keystone_logging_debug: "{{ openstack_logging_debug }}"

openstack_keystone_auth: "{{ openstack_auth }}"

####################
# Kolla
####################
# Developer-mode source checkout settings for the keystone repository.
keystone_git_repository: "{{ kolla_dev_repos_git }}/{{ project_name }}"
keystone_dev_repos_pull: "{{ kolla_dev_repos_pull }}"
keystone_dev_mode: "{{ kolla_dev_mode }}"
keystone_source_version: "{{ kolla_source_version }}"

####################
# Notifications
####################
# Only enable the default "notifications" topic when a consumer (Ceilometer)
# is deployed; otherwise messages would accumulate unread in RabbitMQ.
# NOTE(review): the obsolete enable_cadf_notifications variable was removed
# by this change, so it must not be referenced here anymore.
keystone_default_notifications_topic_enabled: "{{ enable_ceilometer | bool }}"
keystone_default_notifications_topic_name: "notifications"

# Candidate notification topics; each is emitted only when its consumer is
# enabled. Barbican listens on its own queue so Keystone events destined for
# it do not interfere with Ceilometer on the default queue.
keystone_notification_topics:
  - name: "{{ keystone_default_notifications_topic_name }}"
    enabled: "{{ keystone_default_notifications_topic_enabled | bool }}"
  - name: "barbican_notifications"
    enabled: "{{ enable_barbican | bool }}"

# Filtered list of topics whose 'enabled' flag evaluates to true; consumed by
# the keystone.conf template for oslo_messaging_notifications.
keystone_enabled_notification_topics: "{{ keystone_notification_topics | selectattr('enabled', 'equalto', true) | list }}"