03e59aa981
If infra_prod_playbook_collect_log is set, the logs are copied into the published job output, so there is no need to also save an encrypted copy of the same thing.

Change-Id: I32ac5e0ac4d2307f2e1df88c5e2ccbe2fd381839
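As a rough sketch of how the flag is consumed (the job name below is hypothetical; only the two variables matter here), a job can opt into publishing the plain log, which in turn skips the encrypted copy:

- job:
    name: infra-prod-example
    vars:
      # Copy the playbook log straight into the published job output.
      infra_prod_playbook_collect_log: true
      # Because the log is published, the encrypted copy is skipped even
      # though encryption is requested.
      infra_prod_playbook_encrypt_log: true
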
113 lines | 3.6 KiB | YAML
- hosts: localhost
  roles:
    - add-bastion-host

- hosts: prod_bastion[0]
  tasks:
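    # When the log is not being published with the job, optionally
    # GPG-encrypt it and return the encrypted copy instead.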
    - name: Encrypt log
      when: >
        infra_prod_playbook_encrypt_log|default(False) and
        (not infra_prod_playbook_collect_log)
      block:

        - name: Create temporary staging area for encrypted logs
          tempfile:
            state: directory
          register: _encrypt_tempdir

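        # Stage a zuul-owned, world-readable copy of the log so the encrypt
        # role and the executor's pull can read it without privileges.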
        - name: Copy log to tempdir as Zuul user
          copy:
            src: '/var/log/ansible/{{ playbook_name }}.log'
            dest: '{{ _encrypt_tempdir.path }}'
            owner: zuul
            group: zuul
            mode: '0644'
            remote_src: yes
          become: yes

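        # The encrypt-logs role leaves the encrypted file(s) and a
        # download-logs.sh helper script in the staging directory.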
        - name: Encrypt logs
          include_role:
            name: encrypt-logs
          vars:
            encrypt_logs_files:
              - '{{ _encrypt_tempdir.path }}/{{ playbook_name }}.log'
            # Artifact URL should just point to root directory, so blank
            encrypt_logs_artifact_path: ''
            encrypt_logs_download_script_path: '{{ _encrypt_tempdir.path }}'

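        # Pull the encrypted log and the download helper back to the
        # executor's log root for publication.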
        - name: Return logs
          synchronize:
            src: '{{ item[0] }}'
            dest: '{{ item[1] }}'
            mode: pull
            verify_host: true
          loop:
            - ['{{ _encrypt_tempdir.path }}/{{ playbook_name }}.log.gpg', '{{ zuul.executor.log_root }}/{{ playbook_name }}.log.gpg']
            - ['{{ _encrypt_tempdir.path }}/download-logs.sh', '{{ zuul.executor.log_root }}/download-gpg-logs.sh']

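      # Always remove the staging directory, even if encryption failed.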
      always:

        - name: Remove temporary staging
          file:
            path: '{{ _encrypt_tempdir.path }}'
            state: absent
          when: _encrypt_tempdir is defined

    # Not using normal zuul job roles as the bastion host is not a
    # test node with all the normal bits in place.
    - name: Collect log output
      synchronize:
        dest: "{{ zuul.executor.log_root }}/{{ playbook_name }}.log"
        mode: pull
        src: "/var/log/ansible/{{ playbook_name }}.log"
        verify_host: true
      when: infra_prod_playbook_collect_log

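    # Register the collected log as a job artifact; the URL is relative
    # to the job's log root.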
    - name: Return playbook log artifact to Zuul
      when: infra_prod_playbook_collect_log
      zuul_return:
        data:
          zuul:
            artifacts:
              - name: "Playbook Log"
                url: "{{ playbook_name }}.log"
                metadata:
                  type: text

    # Save files locally on bridge
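    # The log is expected to start with a header of the form
    # "Running <timestamp>: ..."; pull the timestamp out of the first line.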
    - name: Get original timestamp from file header
      shell: |
        head -1 /var/log/ansible/{{ playbook_name }}.log | sed -n 's/^Running \(.*\):.*$/\1/p'
      args:
        executable: /bin/bash
      register: _log_timestamp

    - name: Turn timestamp into a string
      set_fact:
        _log_timestamp: '{{ _log_timestamp.stdout | trim }}'

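    # Keep a timestamped copy so successive runs do not overwrite each
    # other's logs.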
    - name: Rename playbook log on bridge
      become: yes
      copy:
        remote_src: yes
        src: "/var/log/ansible/{{ playbook_name }}.log"
        dest: "/var/log/ansible/{{ playbook_name }}.log.{{ _log_timestamp }}"

    # Reset the access/modification time to the timestamp in the filename;
    # this makes lining things up more logical
    - name: Reset file time
      file:
        path: '/var/log/ansible/{{ playbook_name }}.log.{{ _log_timestamp }}'
        state: touch
        modification_time: '{{ _log_timestamp }}'
        modification_time_format: '%Y-%m-%dT%H:%M:%S'
        access_time: '{{ _log_timestamp }}'
        access_time_format: '%Y-%m-%dT%H:%M:%S'
      become: yes

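    # Prune archived copies older than 30 days.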
    - name: Cleanup old playbook logs on bridge
      become: yes
      shell: |
        find /var/log/ansible -name '{{ playbook_name }}.log.*' -type f -mtime +30 -delete