openstack-ansible/playbooks/os-cinder-install.yml
Jesse Pretorius a40cb58118 Wait for container ssh after apparmor profile update
This patch adds a wait for the container's sshd to become available
after the container's apparmor profile is updated. Updating the
profile restarts the container, so this wait is essential for the
playbook to complete successfully.

It also includes 3 retries, which have been found to improve the
rate of success.

Due to an upstream change in behaviour with netaddr 0.7.16 we
need to pin the package to a lower version until Neutron is
adjusted and we bump the Neutron SHA.
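
As an illustration only, such a pin is simply an upper bound on the
package version; the file that carries it lives outside this playbook,
and the exact bound below is inferred from the paragraph above rather
than taken from the patch itself:

    netaddr<0.7.16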

Change-Id: I30575ee31929b0c9af6353b7255cdfb6cebd2104
Closes-Bug: #1490142
2015-09-02 09:21:55 +01:00

---
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
- name: Install cinder server
hosts: cinder_all
max_fail_percentage: 20
user: root
pre_tasks:
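# Adjust the container's AppArmor profile (set to unconfined here) so
# cinder-volume containers can manage block devices; applying the profile
# change restarts the container (see the ssh wait further below).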
- name: Use the lxc-openstack aa profile
lxc_container:
name: "{{ container_name }}"
container_config:
- "lxc.aa_profile=unconfined"
delegate_to: "{{ physical_host }}"
when: >
not is_metal | bool and
inventory_hostname in groups['cinder_volume']
tags:
- lxc-aa-profile
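# Pass the LVM physical volumes backing each configured cinder volume_group
# from the physical host into the container using lxc-device.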
- name: Add volume group block device to cinder
shell: |
{% if item.1.volume_group is defined %}
if [ "$(pvdisplay | grep -B1 {{ item.1.volume_group }} | awk '/PV/ {print $3}')" ];then
for device in `pvdisplay | grep -B1 {{ item.1.volume_group }} | awk '/PV/ {print $3}'`
do lxc-device -n {{ container_name }} add $device
done
fi
{% else %}
echo "{{ item.1 }} volume_group not defined"
{% endif %}
with_items: cinder_backends|dictsort
when: >
cinder_backends is defined and
physical_host != container_name
delegate_to: "{{ physical_host }}"
tags:
- cinder-lxc-devices
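# Extra container config for LVM-backed cinder-volume: disable autodev, allow
# access to block and character devices, and mount devtmpfs inside the container.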
- name: Cinder volume extra lxc config
lxc_container:
name: "{{ container_name }}"
container_config:
- "lxc.autodev=0"
- "lxc.cgroup.devices.allow=a *:* rmw"
- "lxc.mount.entry=udev dev devtmpfs defaults 0 0"
delegate_to: "{{ physical_host }}"
when: >
not is_metal | bool and
inventory_hostname in groups['cinder_volume'] and
cinder_backend_lvm_inuse
tags:
- cinder-container-setup
register: lxc_config
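# Re-trigger udev on the host, but only when the container config above changed.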
- name: udevadm trigger
command: udevadm trigger
tags:
- cinder-container-setup
delegate_to: "{{ physical_host }}"
when: lxc_config is defined and lxc_config.changed
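# Flush the lxc host's network cache via lxc-system-manage before checking
# connectivity to the container.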
- name: Flush net cache
command: /usr/local/bin/lxc-system-manage flush-net-cache
delegate_to: "{{ physical_host }}"
tags:
- flush-net-cache
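# The AppArmor profile update above restarts the container, so wait for sshd
# to answer again (retrying up to 3 times) before any further tasks run.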
- name: Wait for container ssh
wait_for:
port: "22"
delay: "{{ ssh_delay }}"
search_regex: "OpenSSH"
host: "{{ ansible_ssh_host }}"
delegate_to: "{{ physical_host }}"
register: ssh_wait_check
until: ssh_wait_check|success
retries: 3
tags:
- ssh-wait
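# Order the rabbitmq server list relative to this host so that hosts do not
# all prefer the same cluster member.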
- name: Sort the rabbitmq servers
dist_sort:
value_to_lookup: "{{ container_name }}"
ref_list: "{{ groups['cinder_all'] }}"
src_list: "{{ rabbitmq_servers }}"
register: servers
- name: Set rabbitmq servers
set_fact:
rabbitmq_servers: "{{ servers.sorted_list }}"
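# On metal hosts, store cinder logs under /openstack/log and link
# /var/log/cinder to that location so the rsyslog client can ship them.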
- name: Create log dir
file:
path: "{{ item.path }}"
state: directory
with_items:
- { path: "/openstack/log/{{ inventory_hostname }}-cinder" }
when: is_metal | bool
tags:
- cinder-logs
- cinder-log-dirs
- name: Create log aggregation links
file:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
state: "{{ item.state }}"
force: "yes"
with_items:
- { src: "/openstack/log/{{ inventory_hostname }}-cinder", dest: "/var/log/cinder", state: "link" }
when: is_metal | bool
tags:
- cinder-logs
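# The os_cinder role performs the actual service installation; the remaining
# roles configure the ceph client, rsyslog log shipping, and cron scheduling
# coordination.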
roles:
- { role: "os_cinder", tags: [ "os-cinder" ] }
- role: "ceph_client"
openstack_service_system_user: "{{ cinder_system_user_name }}"
tags:
- "cinder-ceph-client"
- "ceph-client"
- role: "rsyslog_client"
rsyslog_client_log_dir: "/var/log/cinder"
rsyslog_client_config_name: "99-cinder-rsyslog-client.conf"
tags:
- "cinder-rsyslog-client"
- "rsyslog-client"
- role: "system_crontab_coordination"
tags:
- "system-crontab-coordination"
vars:
cinder_galera_address: "{{ internal_lb_vip_address }}"
glance_host: "{{ internal_lb_vip_address }}"
ansible_hostname: "{{ container_name }}"
cinder_storage_address: "{{ container_address }}"
is_metal: "{{ properties.is_metal|default(false) }}"