twbs/bootstrap: site/layouts/robots.txt
Commit b8ffcdf9a4 by XhmikosR (2020-02-22 08:54:43 +02:00): Use hugo.Environment instead of getenv (#29240)
hugo.Environment is set automatically to "development" when the local Hugo server is running, and to "production" when Hugo builds the site.
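
For context, a minimal sketch of how the two environments typically arise with the Hugo CLI (assuming default Hugo behavior; the staging line only illustrates the -e/--environment override and is not used by this site):

    hugo server                  # local dev server: hugo.Environment is "development"
    hugo                         # site build: hugo.Environment is "production"
    hugo --environment staging   # explicit override via the --environment flag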

# www.robotstxt.org
{{- $isProduction := eq hugo.Environment "production" -}}
{{- $isNetlify := eq (getenv "NETLIFY") "true" -}}
{{- $allowCrawling := and (not $isNetlify) $isProduction -}}
{{ if $allowCrawling }}
# Allow crawling of all content
{{- end }}
User-agent: *
Disallow:{{ if not $allowCrawling }} /{{ end }}
Sitemap: {{ "/sitemap.xml" | absURL }}
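
Rendered, the template produces one of two outputs. A sketch of both, assuming the production base URL is https://getbootstrap.com/ and the dev-server URL is Hugo's default http://localhost:1313/ (the real values come from the site's baseURL config, and exact blank lines depend on the template's whitespace trimming):

Production build outside Netlify (crawling allowed):

    # www.robotstxt.org

    # Allow crawling of all content
    User-agent: *
    Disallow:
    Sitemap: https://getbootstrap.com/sitemap.xml

Local dev server or a Netlify build (crawling disallowed):

    # www.robotstxt.org
    User-agent: *
    Disallow: /
    Sitemap: http://localhost:1313/sitemap.xml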