# Global rules: Allow all crawlers by default, then override specifics
User-agent: *
Allow: /
Disallow: /admin/
Disallow: /api/
Disallow: /c/          # User chats (privacy)
Disallow: /workspace/  # Internal
Disallow: /error       # Errors
Disallow: /.env
Disallow: /config
Disallow: /logs
Disallow: /debug

# Allow important SEO/static paths
Allow: /auth
Allow: /dashboard
Allow: /upgrade
Allow: /channels
Allow: /static/
Allow: /sitemap.xml
Allow: /*.css
Allow: /*.js
Allow: /*.png$
Allow: /*.jpg$
Allow: /*.jpeg$
Allow: /*.gif$
Allow: /*.svg$
Allow: /*.ico$
Allow: /*.webp$
Allow: /favicon.ico

# Block AI training/scrapers (2025 expanded list: GPT, Claude, Perplexity, etc.)
User-agent: GPTBot
Disallow: /

User-agent: ClaudeBot
Disallow: /

User-agent: Claude-Web
Disallow: /

User-agent: anthropic-ai
Disallow: /

User-agent: PerplexityBot
Disallow: /

User-agent: CCBot
Disallow: /

User-agent: img2dataset
Disallow: /

User-agent: Google-Extended
Disallow: /

User-agent: Applebot-Extended
Disallow: /

User-agent: Omgilibot
Disallow: /

User-agent: Omgili
Disallow: /

User-agent: FacebookBot
Disallow: /

User-agent: Bytespider
Disallow: /

User-agent: magpie-crawler
Disallow: /

User-agent: YouBot
Disallow: /

# Allow major search engines (with crawl-delays for politeness)
# NOTE: Googlebot ignores Crawl-delay (use Search Console crawl-rate settings
# instead); fractional values like 0.5 are not valid — whole seconds only.
User-agent: Googlebot
Allow: /
Crawl-delay: 1

User-agent: Bingbot
Allow: /
Crawl-delay: 1

User-agent: Slurp  # Yahoo
Allow: /
Crawl-delay: 1

User-agent: DuckDuckBot
Allow: /
Crawl-delay: 1

User-agent: Baiduspider
Allow: /
Crawl-delay: 2

User-agent: YandexBot
Allow: /
Crawl-delay: 2

# Allow social media for shares (OG previews)
User-agent: facebookexternalhit
Allow: /

User-agent: Twitterbot
Allow: /

User-agent: LinkedInBot
Allow: /

User-agent: WhatsApp
Allow: /

User-agent: TelegramBot
Allow: /

# Sitemap location (submit to GSC/Bing)
Sitemap: https://spec-chat.tech/sitemap.xml

# Host (legacy Yandex directive — takes a bare hostname, no scheme)
Host: spec-chat.tech