Remove old 404 checker job
This is being replaced with goaccess report jobs.

Change-Id: Ia22d847bfc1a9e450bd8c8e7fab77dd08bd1dfd0
parent 9e394d24d0
commit 68f740faf8
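For context on the replacement: goaccess builds this kind of per-path summary (including a "Not Found URLs (404s)" panel) directly from the access log instead of ad-hoc grep/sed scraping. A minimal sketch of the sort of invocation a goaccess report job relies on — the log path is the one the removed script used, but the output location is an assumption, not taken from this change:

    # Hedged sketch only: the report destination is an assumed path.
    # goaccess parses the Apache combined log and writes a static HTML
    # report that includes a "Not Found URLs (404s)" panel.
    goaccess /var/log/apache2/docs.openstack.org_access.log \
        --log-format=COMBINED \
        -o /var/www/reports/docs.openstack.org.html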
@@ -1183,14 +1183,6 @@
     vars:
       playbook_name: zuul_reconfigure.yaml
 
-- job:
-    name: system-config-static-404-checker
-    description: |
-      Run 404 scraping script on static.opendev.org
-    nodeset:
-      nodes: []
-    run: playbooks/periodic/404.yaml
-
 - job:
     name: system-config-goaccess-report
     description: |
@@ -1381,5 +1373,4 @@
         - system-config-promote-image-python-builder
     periodic:
       jobs:
-        - system-config-static-404-checker
         - zuul-ci-goaccess-report
@@ -1,42 +0,0 @@
-- hosts: localhost
-  tasks:
-    - name: Add static.opendev.org to inventory
-      add_host:
-        name: static.opendev.org
-        ansible_connection: ssh
-        ansible_host: static.opendev.org
-        ansible_port: 22
-        ansible_user: zuul
-
-    - name: Add static.opendev.org host key
-      known_hosts:
-        name: static.opendev.org
-        key: static.opendev.org,23.253.245.150,2001:4800:7818:101:be76:4eff:fe04:7c28 ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBMu3PnnkNhPS2d5Z2uPju3Qqcbbc0lwHA1j9MgHlLnbK3bx1O2Kfez6RJUGl2i6nshdzkKwPBvN2vehQKiw1oSk=
-
-# NOTE(ianw): 2020-02-25 just for initial testing run this for one log
-# in a dumb way.  We can scrape a few more sites.  Overall, we expect
-# this to be replaced with a better analysis tool, see
-# https://review.opendev.org/709236
-- hosts: static.opendev.org
-  tasks:
-    - name: Run 404 scraping script
-      become: yes
-      shell: |
-        SOURCE_FILE=/var/log/apache2/docs.openstack.org_access.log
-        INTERMEDIATE_FILE=$(mktemp)
-
-        # Get just the lines with 404s in them
-        grep ' 404 ' $SOURCE_FILE | sed -n -e 's/.*"GET \(\/.*\) HTTP\/1\.." 404 .*/\1/p' > $INTERMEDIATE_FILE
-
-        if [ -f "$SOURCE_FILE.1" ] ; then
-          # We get roughly the last days worth of logs by looking at the last two
-          # log files.
-          grep ' 404 ' $SOURCE_FILE.1 | sed -n -e 's/.*"GET \(\/.*\) HTTP\/1\.." 404 .*/\1/p' >> $INTERMEDIATE_FILE
-        fi
-
-        # Process those 404s to count them and return sorted by count
-        sort $INTERMEDIATE_FILE | uniq -c | sort -rn | grep '\(html\|\/$\)'
-
-        rm ${INTERMEDIATE_FILE}
-      args:
-        executable: /bin/bash
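For reference, the core of the removed script is the grep/sed pipeline that keeps only the request path from GET lines that returned 404. A quick standalone demonstration, using a made-up log line rather than real data:

    # Illustrative only: the sample log line is fabricated for the demo.
    echo '192.0.2.1 - - [25/Feb/2020:12:00:00 +0000] "GET /train/install-guide/ HTTP/1.1" 404 245 "-" "curl/7.64.0"' \
      | grep ' 404 ' \
      | sed -n -e 's/.*"GET \(\/.*\) HTTP\/1\.." 404 .*/\1/p'
    # prints: /train/install-guide/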