Use Ceph-NFS for Manila in scenario004
CephFS gatewayed by NFS is more generally suitable for multi-tenant OpenStack deployments than native CephFS, since the latter requires that VMs belonging to regular members of Keystone projects be exposed to the Ceph infrastructure and run client software with capabilities that are not appropriate for untrusted cloud tenants.

Change-Id: I269607d43f45f65efcbce33dd776e7eb4f475311
(cherry picked from commit 63c5a94f83)
(cherry picked from commit 7c2933d3b4)
(cherry picked from commit d9414af719)
This commit is contained in:
parent
c11333dd81
commit
e038ecd2e8
|
@ -10,6 +10,7 @@ resource_registry:
|
|||
OS::TripleO::Services::CephMon: ../../deployment/ceph-ansible/ceph-mon.yaml
|
||||
OS::TripleO::Services::CephOSD: ../../deployment/ceph-ansible/ceph-osd.yaml
|
||||
OS::TripleO::Services::CephMds: ../../deployment/ceph-ansible/ceph-mds.yaml
|
||||
OS::TripleO::Services::CephNfs: ../../deployment/ceph-ansible/ceph-nfs.yaml
|
||||
OS::TripleO::Services::CephRgw: ../../deployment/ceph-ansible/ceph-rgw.yaml
|
||||
OS::TripleO::Services::CephClient: ../../deployment/ceph-ansible/ceph-client.yaml
|
||||
OS::TripleO::Services::SwiftProxy: OS::Heat::None
|
||||
|
@ -96,3 +97,7 @@ parameter_defaults:
|
|||
CephConfigOverrides:
|
||||
globalkey: globalvalue
|
||||
anotherkey: anothervalue
|
||||
ManilaCephFSCephFSProtocolHelperType: 'NFS'
|
||||
# Workaround for https://bugs.launchpad.net/tripleo/+bug/1911022
|
||||
ExtraConfig:
|
||||
ganesha_vip: 192.168.24.3
|
|
@ -21,6 +21,8 @@
|
|||
subnet: storage_subnet
|
||||
StorageMgmt:
|
||||
subnet: storage_mgmt_subnet
|
||||
StorageNFS:
|
||||
subnet: storage_nfs_subnet
|
||||
Tenant:
|
||||
subnet: tenant_subnet
|
||||
ServicesDefault:
|
||||
|
@ -44,6 +46,7 @@
|
|||
- OS::TripleO::Services::CephMds
|
||||
- OS::TripleO::Services::CephMgr
|
||||
- OS::TripleO::Services::CephMon
|
||||
- OS::TripleO::Services::CephNfs
|
||||
- OS::TripleO::Services::CephRbdMirror
|
||||
- OS::TripleO::Services::CephRgw
|
||||
- OS::TripleO::Services::CephOSD
|
||||
|
|
Loading…
Reference in New Issue