Serve the local /robots.txt instead of proxying it

Exclude "/robots.txt" from Apache's reverse proxying so that it
can be served to crawlers.

Change-Id: Ibd3a592e7906169d40619ae5c8dc02afe3589a81
Depends-On: I55a86d3c703a667daf55cb75cf559eb23a556219
Clint Adams 2015-10-02 17:28:23 -04:00
parent d38753b5f6
commit 283fa23e56
1 changed file with 4 additions and 3 deletions

@@ -14,9 +14,10 @@ define lodgeit::site(
   include ::httpd
   ::httpd::vhost::proxy { $vhost_name:
-    port    => 80,
-    dest    => "http://localhost:${port}",
-    require => File["/srv/lodgeit/${name}"],
+    port            => 80,
+    dest            => "http://localhost:${port}",
+    require         => File["/srv/lodgeit/${name}"],
+    proxyexclusions => ['/robots.txt'],
   }
   file { "/etc/init/${name}-paste.conf":