robots.txt: adapt for Netlify. (#29192)

Since we build with `HUGO_ENV` set to `production` on Netlify, use another variable to prevent crawling.
This commit is contained in:
XhmikosR 2019-08-02 17:26:25 +03:00 committed by GitHub
parent ca408b176b
commit 1ebb8e7d9b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 6 additions and 2 deletions

View File

@@ -1,8 +1,12 @@
# www.robotstxt.org
-{{ if (eq (getenv "HUGO_ENV") "production") -}}
+{{- $isProduction := eq (getenv "HUGO_ENV") "production" -}}
+{{- $isNetlify := eq (getenv "NETLIFY") "true" -}}
+{{- $allowCrawling := and (not $isNetlify) $isProduction -}}
+{{ if $allowCrawling }}
# Allow crawling of all content
{{- end }}
User-agent: *
-Disallow:{{ if (ne (getenv "HUGO_ENV") "production") }} /{{ end }}
+Disallow:{{ if not $allowCrawling }} /{{ end }}
Sitemap: {{ .Site.BaseURL }}/sitemap.xml