ansible-role-collect-logs/roles/collect-logs/tasks/publish.yml

---
# collection dir could be either a dir or a link
# file module cannot be used here, because it changes link to dir
# when called with state: directory
- name: Ensure the collection directory exists
  shell: |
    if [[ ! -d "{{ artcl_collect_dir }}" && ! -h "{{ artcl_collect_dir }}" ]]; then
      mkdir -p "{{ artcl_collect_dir }}"
    fi
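
# The timestamps/?time=...&appendLog URL below matches the endpoint exposed by
# the Jenkins Timestamper plugin; BUILD_URL is assumed to come from the Jenkins
# job environment, which is why the task is skipped when it is not set.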
- name: Fetch and gzip the console log
  shell: >
    set -o pipefail &&
    curl -k "{{ lookup('env', 'BUILD_URL') }}/timestamps/?time=yyyy-MM-dd%20HH:mm:ss.SSS%20|&appendLog&locale=en_GB"
    | gzip > {{ artcl_collect_dir }}/console.txt.gz
  when: lookup('env', 'BUILD_URL') != ""
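
# The next three tasks render the ARA sqlite databases (quickstart run, root
# run, and overcloud deploy) into static HTML reports plus a JSON dump of task
# data. They assume the ara CLI installed under local_working_dir/bin is the
# 0.x client, which provides "generate html" and "task list".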
- name: Generate and retrieve the ARA static playbook report
  shell: >
    {{ local_working_dir }}/bin/ara generate html {{ local_working_dir }}/ara_oooq;
    {{ local_working_dir }}/bin/ara task list --all -f json > {{ artcl_collect_dir }}/ara.json;
    cp -r {{ local_working_dir }}/ara_oooq {{ artcl_collect_dir }}/;
    {% if artcl_gzip_only|bool %}gzip --best --recursive {{ artcl_collect_dir }}/ara_oooq;{% endif %}
  ignore_errors: true

- name: Generate and retrieve the root ARA static playbook report
  become: true
  shell: >
    {{ local_working_dir }}/bin/ara generate html {{ local_working_dir }}/ara_oooq_root;
    {{ local_working_dir }}/bin/ara task list --all -f json > {{ artcl_collect_dir }}/ara.oooq.root.json;
    cp -r {{ local_working_dir }}/ara_oooq_root {{ artcl_collect_dir }}/;
    {% if artcl_gzip_only|bool %}gzip --best --recursive {{ artcl_collect_dir }}/ara_oooq_root;{% endif %}
  ignore_errors: true

- name: Generate and retrieve the ARA static playbook report for OC deploy
  become: true
  shell: >
    {{ local_working_dir }}/bin/ara generate html {{ local_working_dir }}/ara_oooq_oc;
    {{ local_working_dir }}/bin/ara task list --all -f json > {{ artcl_collect_dir }}/ara.oooq.oc.json;
    cp -r {{ local_working_dir }}/ara_oooq_oc {{ artcl_collect_dir }}/;
    {% if artcl_gzip_only|bool %}gzip --best --recursive {{ artcl_collect_dir }}/ara_oooq_oc;{% endif %}
  ignore_errors: true
  environment:
    ARA_DATABASE: 'sqlite:///{{ ara_overcloud_db_path }}'
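
# The included task files export ARA task timing data to external systems; the
# Graphite server and/or InfluxDB endpoint are only contacted when the
# corresponding variables are defined.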
- include: ara_graphite.yml
  when: ara_graphite_server is defined
  ignore_errors: true

- include: ara_influxdb.yml
  when: influxdb_url is defined or influxdb_create_data_file|bool
  ignore_errors: true
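
# stackviz visualizes tempest runs; if the collected undercloud logs already
# contain generated stackviz data, copy it to the top of the collect dir and
# decompress it so it can be browsed directly.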
- name: Fetch stackviz results to the root of the collect_dir
  shell: >
    if [ -d {{ artcl_collect_dir }}/undercloud/var/log/extra/stackviz/data ]; then
    cp -r {{ artcl_collect_dir }}/undercloud/var/log/extra/stackviz {{ artcl_collect_dir }};
    gunzip -fr {{ artcl_collect_dir }}/stackviz;
    fi;

- name: Fetch stackviz results to the root of the collect_dir for os_tempest
  shell: >
    if [ -d {{ artcl_collect_dir }}/undercloud/var/log/tempest/stackviz/data ]; then
    cp -r {{ artcl_collect_dir }}/undercloud/var/log/tempest/stackviz {{ artcl_collect_dir }};
    gunzip -fr {{ artcl_collect_dir }}/stackviz;
    fi;
  when: use_os_tempest is defined
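
# Tempest result files are also copied to the top of the collect dir so they
# end up at a predictable path; the "|| true" guards keep these tasks from
# failing when a given file does not exist.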
- name: Fetch tempest results to the root of the collect_dir
  shell: >
    cp {{ artcl_collect_dir }}/undercloud/home/stack/tempest/tempest.{xml,html}{,.gz} {{ artcl_collect_dir }} || true;
    gunzip {{ artcl_collect_dir }}/tempest.{xml,html}.gz || true;

- name: Fetch tempest results to the root of the collect_dir for os_tempest
  shell: >
    cp {{ artcl_collect_dir }}/undercloud/var/log/tempest/stestr_results.html.gz {{ artcl_collect_dir }} || true;
    gunzip {{ artcl_collect_dir }}/stestr_results.html.gz || true;
  when: use_os_tempest is defined
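
# The shell module (rather than the copy module) is used below so that the
# shell expands the *.sh and *.log globs before copying.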
- name: Fetch .sh and .log files from local working directory on localhost
  shell: >
    cp {{ item }} {{ artcl_collect_dir }}/
  with_items:
    - "{{ local_working_dir }}/*.sh"
    - "{{ local_working_dir }}/*.log"
  ignore_errors: true
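
# Renaming *.conf.gz, *.log.gz, etc. to *.txt.gz is presumably done so the log
# server serves them with a text MIME type and browsers display them inline
# instead of downloading them.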
- name: Rename compressed text-based files to end with a .txt.gz extension
  shell: >
    set -o pipefail &&
    find {{ artcl_collect_dir }}/ -type f |
    awk 'function rename(orig)
    { new=orig; sub(/\.gz$/, ".txt.gz", new); system("mv " orig " " new) }
    /\.(conf|ini|json|sh|log|yaml|yml|repo|cfg|j2|py)\.gz$/ { rename($0) }
    /(\/var\/log\/|\/etc\/)[^ \/\.]+\.gz$/ { rename($0) }';
  when: artcl_txt_rename|bool

- name: Create the reproducer script
  include_role:
    name: create-reproducer-script
  when: ansible_env.TOCI_JOBTYPE is defined

- name: Create the zuul-based reproducer script if we are running on zuul
  include_role:
    name: create-zuul-based-reproducer
  when: zuul is defined
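
# The remaining tasks publish the collected logs. Only one backend is expected
# to be enabled at a time via artcl_use_rsync, artcl_use_swift or
# artcl_use_zuul_swift_upload; BUILD_TAG is assumed to be provided by the CI
# system and becomes the per-job directory on the log server. A hypothetical
# rsync destination would look like:
#   artcl_rsync_path: "uploader@logs.example.org:/srv/static/logs"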
- name: Upload to the artifact server using pubkey auth
  shell: rsync -av --quiet -e "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" {{ artcl_collect_dir }}/ {{ artcl_rsync_path }}/{{ lookup('env', 'BUILD_TAG') }}
  async: "{{ artcl_publish_timeout }}"
  poll: 15
  retries: 5
  delay: 60
  when: artcl_use_rsync|bool and not artcl_rsync_use_daemon|bool

- name: Upload to the artifact server using password auth
  environment:
    RSYNC_PASSWORD: "{{ lookup('env', 'RSYNC_PASSWORD') }}"
  shell: rsync -av --quiet {{ artcl_collect_dir }}/ {{ artcl_rsync_path }}/{{ lookup('env', 'BUILD_TAG') }}
  async: "{{ artcl_publish_timeout }}"
  poll: 15
  retries: 5
  delay: 60
  when: artcl_use_rsync|bool and artcl_rsync_use_daemon|bool
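
# The X-Delete-After header makes Swift expire the uploaded objects after
# artcl_swift_delete_after seconds, so old job logs are cleaned up
# automatically.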
- name: Upload to swift based artifact server
  shell: swift upload --quiet --header "X-Delete-After:{{ artcl_swift_delete_after }}" {{ artcl_swift_container }}/{{ lookup('env', 'BUILD_TAG') }} *
  args:
    chdir: "{{ artcl_collect_dir }}"
  changed_when: true
  environment:
    OS_AUTH_URL: "{{ artcl_swift_auth_url }}"
    OS_USERNAME: "{{ artcl_swift_username }}"
    OS_PASSWORD: "{{ artcl_swift_password }}"
    OS_TENANT_NAME: "{{ artcl_swift_tenant_name }}"
  async: "{{ artcl_publish_timeout }}"
  poll: 15
  when: artcl_use_swift|bool

- name: Use zuul_swift_upload.py to publish the files
  shell: "{{ artcl_zuul_swift_upload_path }}/zuul_swift_upload.py --name {{ artcl_swift_container }} --delete-after {{ artcl_swift_delete_after }} {{ artcl_collect_dir }}"
  async: "{{ artcl_publish_timeout }}"
  poll: 15
  when: artcl_use_zuul_swift_upload|bool
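
# full_logs.html is a small redirect page pointing at the published log
# location; it is skipped for tripleo-ci, which presumably provides its own
# index on the log server.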
- name: Create the artifact location redirect file
  template:
    src: full_logs.html.j2
    dest: "{{ artcl_collect_dir }}/full_logs.html"
  when: artcl_env != 'tripleo-ci'