Since our Netlify builds also run with `HUGO_ENV` set to `production`, that variable alone can't tell a Netlify deploy apart from the real production host; check the `NETLIFY` environment variable (which Netlify sets to `true` during its builds) to keep those copies from being crawled.
```diff
# www.robotstxt.org
-{{ if (eq (getenv "HUGO_ENV") "production") -}}
+{{- $isProduction := eq (getenv "HUGO_ENV") "production" -}}
+{{- $isNetlify := eq (getenv "NETLIFY") "true" -}}
+{{- $allowCrawling := and (not $isNetlify) $isProduction -}}
+
+{{ if $allowCrawling }}
# Allow crawling of all content
{{- end }}
User-agent: *
-Disallow:{{ if (ne (getenv "HUGO_ENV") "production") }} /{{ end }}
+Disallow:{{ if not $allowCrawling }} /{{ end }}
Sitemap: {{ .Site.BaseURL }}/sitemap.xml
```
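To sanity-check the logic, you can build locally under both environments and inspect the generated file. A minimal sketch, assuming the template lives at `layouts/robots.txt`, `enableRobotsTXT = true` is set in the site config, and a `baseURL` of `https://example.com`:

```sh
# Production build off Netlify: $allowCrawling is true, so nothing is disallowed.
HUGO_ENV=production hugo
cat public/robots.txt
# # Allow crawling of all content
# User-agent: *
# Disallow:
# Sitemap: https://example.com/sitemap.xml

# Simulate a Netlify build (Netlify exports NETLIFY=true itself, so no extra
# configuration is needed there): $allowCrawling is false, everything is blocked.
NETLIFY=true HUGO_ENV=production hugo
cat public/robots.txt
# User-agent: *
# Disallow: /
# Sitemap: https://example.com/sitemap.xml
```

Deriving a single `$allowCrawling` flag keeps the comment and the `Disallow` line in sync, instead of repeating the environment check in two places.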