collect-logs: run in two stages
We should run and publish a minimal set of files even when there are no hosts, i.e. when the run failed before inventory generation. This change separates the collection step, which runs on all hosts except localhost, from the rest, which now runs on localhost. Running on localhost always succeeds, even with an empty inventory.

Also add a log environment file for local collect-logs.sh runs that do not upload logs.

Change-Id: I48d07d42be879026fb80afd73835484770006f85
parent 1a3897c326
commit 2bb8b177bc
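For orientation, a minimal sketch of the two-stage layout described in the commit message, assembled from the playbook hunk further down (play names, host patterns and the artcl_collect override are taken from the diff; nothing else is implied about the play contents):

---
# Stage 1: collection runs only on the provisioned hosts; with an empty
# inventory (e.g. a failure before inventory generation) it is simply a no-op.
- name: Collect logs
  hosts: all:!localhost
  gather_facts: no
  roles:
    - collect-logs

# Stage 2: always runs on localhost, so docs generation and publishing still
# happen even when no hosts exist; artcl_collect: false skips collection here.
- name: Create docs, publish logs
  hosts: localhost
  roles:
    - {role: collect-logs, artcl_collect: false}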
config/general_config/local-logs.yml (new file)
@@ -0,0 +1,6 @@
+---
+# this file is intentionally left blank, it can be used to pass the "local"
+# parameter to the collect-logs.sh CI script, resulting in using the default
+# settings, collecting logs locally but not attempting to upload them anywhere
+# we need at least one dummy value here
+artcl_placeholder: true
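This config file would presumably be consumed as an extra-vars file by whatever playbook run collect-logs.sh wraps; the exact collect-logs.sh interface is not part of this diff, so the invocation below is only an illustrative sketch with an assumed playbook path:

# illustrative only: pass the "local" settings file as extra vars;
# the playbook path is an assumption, not taken from this change
ansible-playbook playbooks/collect-logs.yml \
    -e @config/general_config/local-logs.yml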
@@ -1,7 +1,11 @@
 ---

-- name: Collect logs, create docs, publish
+- name: Collect logs
   hosts: all:!localhost
   gather_facts: no
   roles:
-    - role: collect-logs
+    - collect-logs
+
+- name: Create docs, publish logs
+  hosts: localhost
+  roles:
+    - {role: collect-logs, artcl_collect: false }
@@ -1,12 +1,13 @@
 ---
-- name: Ensure required rpms for logging are installed
-  yum: name={{ item }} state=present
-  with_flattened:
-    - gzip
-    - tar
+- become: true
+  ignore_errors: true
+  block:
+    - name: Ensure required rpms for logging are installed
+      yum: name={{ item }} state=present
+      with_flattened:
+        - gzip
+        - tar

-- block:
     - name: Prepare directory with extra logs
       file: dest=/var/log/extra state=directory
@@ -138,20 +139,20 @@
   file:
     path: "/tmp/{{ inventory_hostname }}"
     state: absent
   become: yes
   become_user: root
   ignore_errors: true

 - name: Set default collect list
-  set_fact: collect_list="{{ artcl_collect_list | join(' ') }}"
+  set_fact:
+    collect_list: "{{ artcl_collect_list | join(' ') }}"

 - name: Override collect list
-  set_fact: collect_list="{{ artcl_collect_override[inventory_hostname] | join(' ') }}"
-  when: artcl_collect_override is defined and artcl_collect_override[inventory_hostname] is defined
+  set_fact:
+    collect_list: "{{ artcl_collect_override[inventory_hostname] | join(' ') }}"
+  when:
+    - artcl_collect_override is defined
+    - artcl_collect_override[inventory_hostname] is defined

 - name: Gather the logs to /tmp
   become: yes
   become_user: root
   shell: >
     mkdir -p /tmp/{{ inventory_hostname }};
     for F in $(ls -d1 /var/log/rpm.list /var/log/extra {{ collect_list }}); do
@@ -197,21 +198,15 @@
     state: absent
   ignore_errors: true

-- block:
+- delegate_to: localhost
+  when: artcl_gzip_only|bool
+  block:
     - name: Extract the logs
       shell: >
         chdir={{ artcl_collect_dir }}
         tar xf {{ inventory_hostname }}.tar;
-      when: artcl_gzip_only|bool

     - name: delete the tar file after extraction
       file:
         path: "{{ artcl_collect_dir }}/{{ inventory_hostname }}.tar"
         state: absent
-      when: artcl_gzip_only|bool
-
-    - name: fetch and gzip the console log
-      shell: >
-        curl {{ lookup('env', 'BUILD_URL') }}/consoleText | gzip > {{ artcl_collect_dir }}/console.txt.gz
-      when: artcl_publish|bool and "{{ lookup('env', 'BUILD_URL') }}" != ""
-  delegate_to: localhost
@@ -1,43 +1,37 @@
 ---

-- block:
-    - name: Ensure required python packages are installed
-      pip:
-        requirements: "{{ local_working_dir }}/usr/local/share/ansible/roles/collect-logs/doc-requirements.txt"
+- name: Ensure required python packages are installed
+  pip:
+    requirements: "{{ local_working_dir }}/usr/local/share/ansible/roles/collect-logs/doc-requirements.txt"

-    - name: Unarchive shell scripts
-      shell: >
-        gunzip "{{ artcl_collect_dir }}/undercloud/home/stack/{{ item }}.sh.gz";
-      with_items: "{{ artcl_create_docs_payload.included_deployment_scripts }}"
-      ignore_errors: yes
-      when: artcl_gzip_only|bool
+- name: Unarchive shell scripts
+  shell: >
+    gunzip "{{ artcl_collect_dir }}/undercloud/home/stack/{{ item }}.sh.gz";
+  with_items: "{{ artcl_create_docs_payload.included_deployment_scripts }}"
+  ignore_errors: yes
+  when: artcl_gzip_only|bool

-    - name: Generate rST docs from scripts and move to Sphinx src dir
-      shell: >
-        awk -f "{{ local_working_dir }}/usr/local/share/ansible/roles/collect-logs/scripts/doc_extrapolation.awk" \
-        "{{ artcl_collect_dir }}/undercloud/home/stack/{{ item }}.sh" > \
-        "{{ artcl_docs_source_dir }}/{{ item }}.rst"
-      with_items: "{{ artcl_create_docs_payload.included_deployment_scripts }}"
-      ignore_errors: yes
+- name: Generate rST docs from scripts and move to Sphinx src dir
+  shell: >
+    awk -f "{{ local_working_dir }}/usr/local/share/ansible/roles/collect-logs/scripts/doc_extrapolation.awk" \
+    "{{ artcl_collect_dir }}/undercloud/home/stack/{{ item }}.sh" > \
+    "{{ artcl_docs_source_dir }}/{{ item }}.rst"
+  with_items: "{{ artcl_create_docs_payload.included_deployment_scripts }}"
+  ignore_errors: yes

-    - name: Fetch static rST docs to include in output docs
-      shell: >
-        cp "{{ artcl_docs_source_dir }}/../static/{{ item }}.rst" "{{ artcl_docs_source_dir }}"
-      with_items: "{{ artcl_create_docs_payload.included_static_docs }}"
-      ignore_errors: yes
+- name: Fetch static rST docs to include in output docs
+  shell: >
+    cp "{{ artcl_docs_source_dir }}/../static/{{ item }}.rst" "{{ artcl_docs_source_dir }}"
+  with_items: "{{ artcl_create_docs_payload.included_static_docs }}"
+  ignore_errors: yes

-    - name: Generate fresh index.rst for Sphinx
-      template:
-        src: index.rst.j2
-        dest: "{{ artcl_docs_source_dir }}/index.rst"
-        force: yes
+- name: Generate fresh index.rst for Sphinx
+  template:
+    src: index.rst.j2
+    dest: "{{ artcl_docs_source_dir }}/index.rst"
+    force: yes

-    - name: Build docs with Sphinx
-      shell: >
-        sphinx-build -b html "{{ artcl_docs_source_dir }}" \
-        "{{ artcl_docs_build_dir }}" &> "{{ local_working_dir }}/sphinx_build.log"
-  delegate_to: localhost
-  run_once: true
-  when: artcl_gen_docs|bool
+- name: Build docs with Sphinx
+  shell: >
+    sphinx-build -b html "{{ artcl_docs_source_dir }}" \
+    "{{ artcl_docs_build_dir }}" &> "{{ local_working_dir }}/sphinx_build.log"
@@ -5,8 +5,12 @@

 - name: Generate docs
   include: create-docs.yml
-  when: artcl_gen_docs|bool
+  when:
+    - artcl_gen_docs|bool
+    - not artcl_collect|bool

 - name: Publish logs
   include: publish.yml
-  when: artcl_publish|bool
+  when:
+    - artcl_publish|bool
+    - not artcl_collect|bool
@@ -1,39 +1,45 @@
 ---
 - name: Ensure the collection directory exists
   file:
     path: "{{ artcl_collect_dir }}"
     state: directory

-- name: upload to the artifact server using pubkey auth
-  command: rsync -av --quiet -e "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" {{ artcl_collect_dir }}/ {{ artcl_rsync_path }}/{{ lookup('env', 'BUILD_TAG') }}
-  retries: 5
-  delay: 60
-  when: artcl_use_rsync|bool and not artcl_rsync_use_daemon|bool
+- run_once: true
+  delegate_to: localhost
+  block:
+    - name: fetch and gzip the console log
+      shell: >
+        curl {{ lookup('env', 'BUILD_URL') }}/consoleText | gzip > {{ artcl_collect_dir }}/console.txt.gz
+      when: lookup('env', 'BUILD_URL') != ""
+
+    - name: upload to the artifact server using pubkey auth
+      command: rsync -av --quiet -e "ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" {{ artcl_collect_dir }}/ {{ artcl_rsync_path }}/{{ lookup('env', 'BUILD_TAG') }}
+      retries: 5
+      delay: 60
+      when: artcl_use_rsync|bool and not artcl_rsync_use_daemon|bool

-- name: upload to the artifact server using password auth
-  environment:
-    RSYNC_PASSWORD: "{{ lookup('env', 'RSYNC_PASSWORD') }}"
-  command: rsync -av --quiet {{ artcl_collect_dir }}/ {{ artcl_rsync_path }}/{{ lookup('env', 'BUILD_TAG') }}
-  retries: 5
-  delay: 60
-  when: artcl_use_rsync|bool and artcl_rsync_use_daemon|bool
+    - name: upload to the artifact server using password auth
+      environment:
+        RSYNC_PASSWORD: "{{ lookup('env', 'RSYNC_PASSWORD') }}"
+      command: rsync -av --quiet {{ artcl_collect_dir }}/ {{ artcl_rsync_path }}/{{ lookup('env', 'BUILD_TAG') }}
+      retries: 5
+      delay: 60
+      when: artcl_use_rsync|bool and artcl_rsync_use_daemon|bool

-- name: upload to swift based artifact server
-  shell: swift upload --quiet --header "X-Delete-After:{{ artcl_swift_delete_after }}" {{ artcl_swift_container }}/{{ lookup('env', 'BUILD_TAG') }} *
-  args:
-    chdir: "{{ artcl_collect_dir }}"
-  changed_when: true
-  environment:
-    OS_AUTH_URL: "{{ artcl_swift_auth_url }}"
-    OS_USERNAME: "{{ artcl_swift_username }}"
-    OS_PASSWORD: "{{ artcl_swift_password }}"
-    OS_TENANT_NAME: "{{ artcl_swift_tenant_name }}"
-  when: artcl_use_swift|bool
+    - name: upload to swift based artifact server
+      shell: swift upload --quiet --header "X-Delete-After:{{ artcl_swift_delete_after }}" {{ artcl_swift_container }}/{{ lookup('env', 'BUILD_TAG') }} *
+      args:
+        chdir: "{{ artcl_collect_dir }}"
+      changed_when: true
+      environment:
+        OS_AUTH_URL: "{{ artcl_swift_auth_url }}"
+        OS_USERNAME: "{{ artcl_swift_username }}"
+        OS_PASSWORD: "{{ artcl_swift_password }}"
+        OS_TENANT_NAME: "{{ artcl_swift_tenant_name }}"
+      when: artcl_use_swift|bool

-- name: use zuul_swift_upload.py to publish the files
-  command: "{{ artcl_zuul_swift_upload_path }}/zuul_swift_upload.py --name {{ artcl_swift_container }} --delete-after {{ artcl_swift_delete_after }} {{ artcl_collect_dir }}"
-  when: artcl_use_zuul_swift_upload|bool
+    - name: use zuul_swift_upload.py to publish the files
+      command: "{{ artcl_zuul_swift_upload_path }}/zuul_swift_upload.py --name {{ artcl_swift_container }} --delete-after {{ artcl_swift_delete_after }} {{ artcl_collect_dir }}"
+      when: artcl_use_zuul_swift_upload|bool

-- name: create the artifact location redirect file
-  template:
-    src: full_logs.html.j2
-    dest: "{{ artcl_collect_dir }}/full_logs.html"
+    - name: create the artifact location redirect file
+      template:
+        src: full_logs.html.j2
+        dest: "{{ artcl_collect_dir }}/full_logs.html"