Handle robots.txt on logs.o.o

We don't want to delete the top-level robots.txt file, but everything
that is part of the logs hierarchy, including any nested robots.txt
files, should still be cleaned up.
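
For illustration, the old test excluded robots.txt by basename at any
depth, while the new test excludes only the single top-level path (the
nested path in the comment below is hypothetical):

  # old: skips every file named robots.txt, at any depth
  find /srv/static/logs/ -not -name robots.txt -print

  # new: skips only /srv/static/logs/robots.txt; a nested copy such as
  # /srv/static/logs/12345/robots.txt is still found and processed
  find /srv/static/logs/ -not -wholename /srv/static/logs/robots.txt -print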

Change-Id: I369565a780eb749eab95e4bf8e7d8d7fd5bf0630
Author: Andreas Jaeger, 2019-08-06 21:32:58 +02:00 (committed by Andreas Jaeger)
parent 329f330b3e
commit f494aa8762
1 changed file with 3 additions and 2 deletions

@@ -1,8 +1,9 @@
 #!/bin/sh
 sleep $((RANDOM%600)) && \
 flock -n /var/run/gziplogs.lock \
-  find -O3 /srv/static/logs/ -depth -not -name robots.txt -not -name lost+found \
-    -not -wholename /srv/static/logs/help/\* \( \
+  find -O3 /srv/static/logs/ -depth -not -name lost+found \
+    -not -wholename /srv/static/logs/help/\* \
+    -not -wholename /srv/static/logs/robots.txt \( \
     \( -type f -mmin +10 -not -name \*\[.-\]gz -not -name \*\[._-\]\[zZ\] \
       \( -name \*.txt -or -name \*.html -or -name tmp\* \) \
       -exec gzip \{\} \; \) \
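
A quick way to sanity-check the new exclusions before the cron job gzips
or deletes anything is to swap the destructive actions for -print (a
sketch assuming the same /srv/static/logs layout); it should list every
robots.txt except the top-level one:

  find -O3 /srv/static/logs/ -depth -not -name lost+found \
    -not -wholename /srv/static/logs/help/\* \
    -not -wholename /srv/static/logs/robots.txt \
    -name robots.txt -print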