Mirror of https://github.com/twbs/bootstrap.git, synced 2024-12-01 13:24:25 +01:00
robots.txt: adapt for Netlify. (#29192)
Since we build with `HUGO_ENV` set to `production` on Netlify too, `HUGO_ENV` alone can't tell Netlify deploys apart from the real production site; additionally check the `NETLIFY` environment variable and disallow crawling on Netlify builds.
Parent: ca408b176b
Commit: 1ebb8e7d9b
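The hunk below derives a single `$allowCrawling` flag: crawling is allowed only when `HUGO_ENV` is `production` and the build is not running on Netlify (the template reads the `NETLIFY` environment variable, which Netlify's build environment is expected to set to `true`). Rendered output in the two production cases would look roughly as follows; the base URL is a placeholder:

On Netlify (HUGO_ENV=production, NETLIFY=true):

# www.robotstxt.org
User-agent: *
Disallow: /
Sitemap: https://example.com/sitemap.xml

On the real production host (HUGO_ENV=production, NETLIFY unset):

# www.robotstxt.org
# Allow crawling of all content
User-agent: *
Disallow:
Sitemap: https://example.com/sitemap.xml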
@@ -1,8 +1,12 @@
 # www.robotstxt.org
 
-{{ if (eq (getenv "HUGO_ENV") "production") -}}
+{{- $isProduction := eq (getenv "HUGO_ENV") "production" -}}
+{{- $isNetlify := eq (getenv "NETLIFY") "true" -}}
+{{- $allowCrawling := and (not $isNetlify) $isProduction -}}
+
+{{ if $allowCrawling }}
 # Allow crawling of all content
 {{- end }}
 User-agent: *
-Disallow:{{ if (ne (getenv "HUGO_ENV") "production") }} /{{ end }}
+Disallow:{{ if not $allowCrawling }} /{{ end }}
 Sitemap: {{ .Site.BaseURL }}/sitemap.xml
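A quick way to sanity-check both branches locally, assuming the site config has Hugo's `enableRobotsTXT` switched on and output goes to the default `public/` directory (these commands are illustrative, not part of this commit):

# Simulate a Netlify build: expect "Disallow: /".
HUGO_ENV=production NETLIFY=true hugo && grep -A 1 'User-agent' public/robots.txt

# Simulate the real production build: expect an empty "Disallow:".
HUGO_ENV=production hugo && grep -A 1 'User-agent' public/robots.txt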