372e991bec
As described here:
https://github.com/openstack/keystone/blob/master/keystone/resource/core.py#L841
https://github.com/openstack/keystone/blob/master/keystone/conf/identity.py#L21

* default project domain name MUST be named 'Default'
* default project domain id MUST be named 'default'
* default project user name MUST be named 'Default'
* default project user id MUST be named 'default'

Change-Id: I610a0416647fdea31bb04889364da5395d8c8d74
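For illustration, a minimal sketch (values assumed from the Keystone defaults referenced above, not taken from this change) of what the [keystone_authtoken] block below renders to when default_project_domain_id and default_user_domain_id keep those stock values:

    project_domain_id = default
    user_domain_id = default
    project_name = service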
169 lines, 5.9 KiB, Django/Jinja
[DEFAULT]
debug = {{ cinder_logging_debug }}

log_dir = /var/log/kolla/cinder

use_forwarded_for = true

# Set use_stderr to False or the logs will also be sent to stderr
# and collected by Docker
use_stderr = False

osapi_volume_workers = {{ openstack_service_workers }}
volume_name_template = volume-%s
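# Glance is reached via the internal endpoint (the glance-api service name
# under Kubernetes); retry as many times as there are glance-api hosts.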
glance_api_servers = {{ internal_protocol }}://{% if orchestration_engine == 'KUBERNETES' %}glance-api{% else %}{{ kolla_internal_vip_address }}{% endif %}:{{ glance_api_port }}

glance_num_retries = {{ groups['glance-api'] | length }}

os_region_name = {{ openstack_region_name }}
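# List only the backends that are actually enabled; each name here must match
# one of the backend sections defined further down in this file.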
{% if cinder_enabled_backends %}
enabled_backends = {{ cinder_enabled_backends|map(attribute='name')|join(',') }}
{% endif %}
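# For the cinder-backup service, the backup driver is selected by
# cinder_backup_driver: Ceph RADOS, NFS, or Swift.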
{% if service_name == "cinder-backup" %}
{% if enable_ceph | bool and cinder_backup_driver == "ceph" %}
backup_driver = cinder.backup.drivers.ceph
backup_ceph_conf = /etc/ceph/ceph.conf
backup_ceph_user = cinder-backup
backup_ceph_chunk_size = 134217728
backup_ceph_pool = {{ ceph_cinder_backup_pool_name }}
backup_ceph_stripe_unit = 0
backup_ceph_stripe_count = 0
restore_discard_excess_bytes = true
{% elif cinder_backup_driver == "nfs" %}
backup_driver = cinder.backup.drivers.nfs
backup_mount_options = {{ cinder_backup_mount_options_nfs }}
backup_mount_point_base = /var/lib/cinder/backup
backup_share = {{ cinder_backup_share }}
backup_file_size = 327680000
{% elif enable_swift | bool and cinder_backup_driver == "swift" %}
backup_driver = cinder.backup.drivers.swift
backup_swift_url = http://{{ kolla_internal_vip_address }}:{{ swift_proxy_server_port }}/v1/AUTH_
backup_swift_auth = per_user
backup_swift_auth_version = 1
backup_swift_user =
backup_swift_key =
{% endif %}
{% endif %}

osapi_volume_listen = {{ api_interface_address }}
osapi_volume_listen_port = {{ cinder_api_port }}

api_paste_config = /etc/cinder/api-paste.ini

auth_strategy = keystone
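# Build a comma-separated RabbitMQ transport URL with one user:password@host:port
# entry per host in the rabbitmq group (a single "rabbitmq" service name is used
# under Kubernetes).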
transport_url = rabbit://{% for host in groups['rabbitmq'] %}{{ rabbitmq_user }}:{{ rabbitmq_password }}@{% if orchestration_engine == 'KUBERNETES' %}rabbitmq{% else %}{{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}{% endif %}:{{ rabbitmq_port }}{% if not loop.last %},{% endif %}{% endfor %}
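# Emit notifications over messagingv2 only when a consumer (Ceilometer or
# Searchlight) is enabled; otherwise drop them with the noop driver.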
[oslo_messaging_notifications]
{% if enable_ceilometer | bool or enable_searchlight | bool %}
driver = messagingv2
topics = notifications
{% else %}
driver = noop
{% endif %}

[nova]
region_name = {{ openstack_region_name }}
interface = internal
token_auth_url = {{ internal_protocol }}://{{ kolla_internal_fqdn }}:{{ keystone_public_port }}
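# max_retries = -1 tells oslo.db to retry the initial database connection
# indefinitely instead of giving up after a fixed number of attempts.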
[database]
connection = mysql+pymysql://{{ cinder_database_user }}:{{ cinder_database_password }}@{{ cinder_database_address }}/{{ cinder_database_name }}
max_retries = -1
[keystone_authtoken]
{% if orchestration_engine == 'KUBERNETES' %}
auth_uri = {{ keystone_internal_url }}
auth_url = {{ keystone_admin_url }}
{% else %}
auth_uri = {{ internal_protocol }}://{{ kolla_internal_fqdn }}:{{ keystone_public_port }}
auth_url = {{ admin_protocol }}://{{ kolla_internal_fqdn }}:{{ keystone_admin_port }}
{% endif %}
auth_type = password
project_domain_id = {{ default_project_domain_id }}
user_domain_id = {{ default_user_domain_id }}
project_name = service
username = {{ cinder_keystone_user }}
password = {{ cinder_keystone_password }}
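# Cache validated Keystone tokens in memcached and encrypt the cached entries;
# one server entry per host in the memcached group (a single "memcached"
# service name is used under Kubernetes).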
memcache_security_strategy = ENCRYPT
memcache_secret_key = {{ memcache_secret_key }}
memcached_servers = {% for host in groups['memcached'] %}{% if orchestration_engine == 'KUBERNETES' %}memcached{% else %}{{ hostvars[host]['ansible_' + hostvars[host]['api_interface']]['ipv4']['address'] }}{% endif %}:{{ memcached_port }}{% if not loop.last %},{% endif %}{% endfor %}

[oslo_concurrency]
lock_path = /var/lib/cinder/tmp

{% if enable_cinder_backend_lvm | bool %}
[lvm-1]
volume_group = {{ cinder_volume_group }}
volume_driver = cinder.volume.drivers.lvm.LVMVolumeDriver
volume_backend_name = lvm-1
iscsi_helper = {{ cinder_iscsi_helper }}
iscsi_protocol = iscsi
{% endif %}
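# Ceph RBD backend: volumes are chunked into 4 MB RADOS objects
# (rbd_store_chunk_size is in megabytes) and clone chains are flattened once
# they exceed rbd_max_clone_depth.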
{% if enable_ceph | bool and cinder_backend_ceph | bool %}
[rbd-1]
volume_driver = cinder.volume.drivers.rbd.RBDDriver
rbd_pool = {{ ceph_cinder_pool_name }}
rbd_ceph_conf = /etc/ceph/ceph.conf
rbd_flatten_volume_from_snapshot = false
rbd_max_clone_depth = 5
rbd_store_chunk_size = 4
rados_connect_timeout = 5
rbd_user = cinder
rbd_secret_uuid = {{ cinder_rbd_secret_uuid }}
report_discard_supported = True
{% endif %}

{% if enable_cinder_backend_nfs | bool %}
[nfs-1]
volume_driver = cinder.volume.drivers.nfs.NfsDriver
volume_backend_name = nfs-1
nfs_shares_config = /etc/cinder/nfs_shares
{% endif %}

{% if enable_cinder_backend_hnas_iscsi | bool %}
[hnas-iscsi]
volume_driver = cinder.volume.drivers.hitachi.hnas_iscsi.HNASISCSIDriver
volume_backend_name = {{ hnas_iscsi_backend }}
hnas_username = {{ hnas_iscsi_username }}
hnas_password = {{ hnas_iscsi_password }}
hnas_mgmt_ip0 = {{ hnas_iscsi_mgmt_ip0 }}
hnas_chap_enabled = True

hnas_svc0_volume_type = {{ hnas_iscsi_svc0_volume_type }}
hnas_svc0_hdp = {{ hnas_iscsi_svc0_hdp }}
hnas_svc0_iscsi_ip = {{ hnas_iscsi_svc0_ip }}
{% endif %}

{% if enable_cinder_backend_hnas_nfs | bool %}
[hnas-nfs]
volume_driver = cinder.volume.drivers.hitachi.hnas_nfs.HNASNFSDriver
nfs_shares_config = /home/cinder/nfs_shares
volume_backend_name = {{ hnas_nfs_backend }}
hnas_username = {{ hnas_nfs_username }}
hnas_password = {{ hnas_nfs_password }}
hnas_mgmt_ip0 = {{ hnas_nfs_mgmt_ip0 }}

hnas_svc0_volume_type = {{ hnas_nfs_svc0_volume_type }}
hnas_svc0_hdp = {{ hnas_nfs_svc0_hdp }}
{% endif %}
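# Privileged operations run through oslo.privsep; the helper process is
# launched via cinder-rootwrap so it can start with elevated privileges.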
[privsep_entrypoint]
helper_command=sudo cinder-rootwrap /etc/cinder/rootwrap.conf privsep-helper --config-file /etc/cinder/cinder.conf
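# OSProfiler request tracing: hmac_keys is the shared secret used to sign
# trace requests; traces are stored in Elasticsearch when it is enabled.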
{% if enable_osprofiler | bool %}
[profiler]
enabled = true
trace_sqlalchemy = true
hmac_keys = {{ osprofiler_secret }}
{% if enable_elasticsearch | bool %}
connection_string = elasticsearch://{{ elasticsearch_address }}:{{ elasticsearch_port }}
{% endif %}
{% endif %}