robots.txt: disallow crawling when not in production. (#27559)

XhmikosR 2018-11-02 12:41:57 +02:00 committed by GitHub
parent 4b15ec9cad
commit 3256a2c231
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
1 changed file with 1 addition and 1 deletion


@@ -5,5 +5,5 @@
 # Allow crawling of all content
 User-agent: *
-Disallow:
+Disallow:{% if jekyll.environment != "production" %} /{% endif %}
 Sitemap: {{ site.url }}/sitemap.xml
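
The Liquid conditional is evaluated when Jekyll builds the site, so the served robots.txt differs per environment: in production the Disallow directive stays empty (all crawling allowed), while in any other environment it becomes "Disallow: /" (all crawling blocked). A minimal sketch of the rendered output under that assumption — the sitemap hostname below is a placeholder, since the real value comes from site.url:

# Built with JEKYLL_ENV=production — an empty Disallow permits all crawling
User-agent: *
Disallow:
Sitemap: https://example.com/sitemap.xml

# Built with any other JEKYLL_ENV (Jekyll defaults to "development") — "/" blocks everything
User-agent: *
Disallow: /
Sitemap: https://example.com/sitemap.xml

The practical effect is that local, preview, and staging builds tell crawlers to stay out, without maintaining a separate robots.txt for non-production deployments.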