Disallow robots for wiki-dev site

We don't want old, stale copies of our production wiki content
showing up in search engines, so set the mediawiki module parameter
that disallows robots from indexing the site.

Change-Id: If8a2f2c2c00715ecce0ac1aa279f649ec84496a1
Depends-On: Ic62a72555315bd344db338809920a3605f17c8c6
Jeremy Stanley 2016-09-07 20:47:27 +00:00
parent 3c303edc45
commit 62fc83c9a9
2 changed files with 3 additions and 0 deletions
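For context, a minimal sketch of how a disallow_robots flag like this one commonly takes effect. The class name, parameter defaults, and file path below are illustrative assumptions, not the actual mediawiki module code, which is not part of this change:

# Illustrative sketch only: the mediawiki module's real handling of
# disallow_robots is not shown in this diff. A typical approach is to
# manage a restrictive robots.txt in the wiki's document root when the
# flag is set. Class and path names here are assumptions.
class robots_txt_example (
  $disallow_robots = false,
  $docroot         = '/srv/mediawiki',   # assumed document root
) {
  if $disallow_robots {
    file { "${docroot}/robots.txt":
      ensure  => file,
      owner   => 'root',
      group   => 'root',
      mode    => '0644',
      # Ask every crawler to skip the entire site.
      content => "User-agent: *\nDisallow: /\n",
    }
  }
}

Under this sketch, leaving the parameter at its default (as on the production wiki nodes) makes the conditional a no-op, so only the wiki-dev hosts get the restrictive robots.txt.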


@@ -407,6 +407,7 @@ node /^wiki-dev\d+\.openstack\.org$/ {
     wg_upgradekey         => hiera('wg_upgradekey'),
     wg_recaptchasitekey   => hiera('wg_recaptchasitekey'),
     wg_recaptchasecretkey => hiera('wg_recaptchasecretkey'),
+    disallow_robots       => true,
   }
 }


@@ -17,6 +17,7 @@ class openstack_project::wiki (
   $wg_recaptchasitekey = undef,
   $wg_recaptchasecretkey = undef,
   $wg_googleanalyticsaccount = undef,
+  $disallow_robots = undef,
 ) {
   package { ['openssl', 'ssl-cert', 'subversion']:
@@ -54,6 +55,7 @@ class openstack_project::wiki (
     wg_googleanalyticsaccount => $wg_googleanalyticsaccount,
     wg_sitename               => 'OpenStack',
     wg_logo                   => "https://${site_hostname}/w/images/thumb/c/c4/OpenStack_Logo_-_notext.png/30px-OpenStack_Logo_-_notext.png",
+    disallow_robots           => $disallow_robots,
   }
   class { 'memcached':
     max_memory => 2048,