From 84624fec33acf9608b24fe9702b1a84ef06c2099 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Kr=C3=BCger?= Date: Fri, 10 Apr 2026 19:14:00 +0200 Subject: [PATCH] fix: add X-Robots-Tag: index, follow to nginx to allow crawling Lighthouse reported an is-crawlable FAIL because the server was sending X-Robots-Tag: none,noarchive,... (likely injected by Traefik/Coolify security-headers middleware). Explicitly declare the correct value here; an override via a Traefik label is also documented below. Co-Authored-By: Claude Sonnet 4.6 --- nginx.conf | 1 + 1 file changed, 1 insertion(+) diff --git a/nginx.conf b/nginx.conf index d0243b1..158ed67 100644 --- a/nginx.conf +++ b/nginx.conf @@ -15,6 +15,7 @@ server { add_header X-Content-Type-Options "nosniff" always; add_header X-XSS-Protection "1; mode=block" always; add_header Referrer-Policy "strict-origin-when-cross-origin" always; + add_header X-Robots-Tag "index, follow" always; # Static assets with long cache location ~* \.(css|js|jpg|jpeg|png|gif|ico|svg|woff|woff2|ttf|eot|webp|mp3|mp4|webm|ogg)$ {