resource_registry:
  # Ceph services are provided by the ceph-ansible based implementations.
  OS::TripleO::Services::CephMgr: ../../deployment/ceph-ansible/ceph-mgr.yaml
  OS::TripleO::Services::CephMon: ../../deployment/ceph-ansible/ceph-mon.yaml
  OS::TripleO::Services::CephOSD: ../../deployment/ceph-ansible/ceph-osd.yaml
  OS::TripleO::Services::CephClient: ../../deployment/ceph-ansible/ceph-client.yaml
  OS::TripleO::Services::OsloMessagingRpc: ../../deployment/rabbitmq/rabbitmq-messaging-rpc-pacemaker-puppet.yaml
  OS::TripleO::Services::OsloMessagingNotify: ../../deployment/rabbitmq/rabbitmq-messaging-notify-shared-puppet.yaml
  OS::TripleO::Services::OctaviaApi: ../../deployment/octavia/octavia-api-container-puppet.yaml
  OS::TripleO::Services::OctaviaHousekeeping: ../../deployment/octavia/octavia-housekeeping-container-puppet.yaml
  OS::TripleO::Services::OctaviaHealthManager: ../../deployment/octavia/octavia-health-manager-container-puppet.yaml
  OS::TripleO::Services::OctaviaWorker: ../../deployment/octavia/octavia-worker-container-puppet.yaml
  OS::TripleO::Services::OctaviaDeploymentConfig: ../../deployment/octavia/octavia-deployment-config.yaml
  # Mapping a service to OS::Heat::None disables it in this scenario.
  OS::TripleO::Services::CinderApi: OS::Heat::None
  OS::TripleO::Services::CinderBackup: OS::Heat::None
  OS::TripleO::Services::CinderScheduler: OS::Heat::None
  OS::TripleO::Services::CinderVolume: OS::Heat::None
  OS::TripleO::Services::SwiftProxy: OS::Heat::None
  OS::TripleO::Services::SwiftDispersion: OS::Heat::None
  OS::TripleO::Services::SwiftRingBuilder: OS::Heat::None
  OS::TripleO::Services::SwiftStorage: OS::Heat::None
  OS::TripleO::Services::Horizon: OS::Heat::None

parameter_defaults:
  StandaloneExtraConfig:
    octavia::controller::connection_retry_interval: 10
  OctaviaAmphoraSshKeyFile: /home/zuul/.ssh/id_rsa.pub
  OctaviaAmphoraImageFilename: /home/zuul/amphora.qcow2
  OctaviaCaKeyPassphrase: 'upstreamci'
  OctaviaManageNovaFlavor: true
  OctaviaGenerateCerts: true
  NodeDataLookup:
    AB4114B1-9C9D-409A-BEFB-D88C151BF2C3: {"foo": "bar"}
    8CF1A7EA-7B4B-4433-AC83-17675514B1B8: {"foo2": "bar2"}
  Debug: true
  HideSensitiveLogs: false
  CephAnsibleDisksConfig:
    osd_objectstore: bluestore
    osd_scenario: lvm
    lvm_volumes:
      - data: ceph_lv_data
        data_vg: ceph_vg
        db: ceph_lv_db
        db_vg: ceph_vg
        wal: ceph_lv_wal
        wal_vg: ceph_vg
  CephPoolDefaultPgNum: 32
  CephPoolDefaultSize: 1
  CephAnsibleExtraConfig:
    centos_package_dependencies: []
    ceph_osd_docker_memory_limit: '1g'
    ceph_mds_docker_memory_limit: '1g'
    mon_host_v1: { 'enabled': False }
    handler_health_mon_check_retries: 10
    handler_health_mon_check_delay: 20
  # NOTE: These IDs and keys should be regenerated for a production
  # deployment (e.g. a fresh UUID for the FSID and a key from
  # `ceph-authtool --gen-print-key`). What is here is suitable for
  # developer and CI testing only.
  CephClusterFSID: '4b5c8c0a-ff60-454b-a1b4-9747aa737d19'
  CephClientKey: 'AQC+vYNXgDAgAhAAc8UoYt+OTz5uhV7ItLdwUw=='
  CephAnsiblePlaybookVerbosity: 1
  CephAnsibleEnvironmentVariables:
    ANSIBLE_SSH_RETRIES: 4
    DEFAULT_FORKS: 3
  NovaEnableRbdBackend: true
  CinderEnableRbdBackend: true
  CephAnsibleRepo: "tripleo-centos-ceph-nautilus"
  CinderBackupBackend: ceph
  GlanceBackend: rbd
  CinderEnableIscsiBackend: false
  BannerText: |
    ******************************************************************
    * This system is for the use of authorized users only. Usage of  *
    * this system may be monitored and recorded by system personnel. *
    * Anyone using this system expressly consents to such monitoring *
    * and is advised that if such monitoring reveals possible        *
    * evidence of criminal activity, system personnel may provide    *
    * the evidence from such monitoring to law enforcement officials.*
    ******************************************************************
  CollectdExtraPlugins:
    - rrdtool
  LoggingServers:
    - host: 127.0.0.1
      port: 24224
  TtyValues:
    - console
    - tty1
    - tty2
    - tty3
    - tty4
    - tty5
    - tty6
  ContainerCli: podman
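
# A hedged sketch (an assumption for illustration, not part of this scenario)
# of the LVM layout that the CephAnsibleDisksConfig lvm_volumes entry above
# expects to find on the node; the backing device and sizes are placeholders:
#
#   pvcreate /dev/ceph_disk
#   vgcreate ceph_vg /dev/ceph_disk
#   lvcreate -n ceph_lv_data -L 8G ceph_vg
#   lvcreate -n ceph_lv_db -L 2G ceph_vg
#   lvcreate -n ceph_lv_wal -L 2G ceph_vg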
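#
# A minimal usage sketch, also an assumption: an environment file like this
# is passed with -e to a standalone TripleO deployment; the paths and the
# local IP below are placeholders for illustration.
#
#   sudo openstack tripleo deploy \
#     --templates /usr/share/openstack-tripleo-heat-templates \
#     --local-ip 192.168.24.1/24 \
#     -e /usr/share/openstack-tripleo-heat-templates/environments/standalone/standalone-tripleo.yaml \
#     -e <path to this file> \
#     --standalone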