# robots.txt - Optimized for Google, Bing, and AI Search Engines (ChatGPT, Perplexity, Claude)

# === User Agents: Traditional Search Engines ===
User-agent: Googlebot
Allow: /
Crawl-delay: 0

User-agent: Bingbot
Allow: /
Crawl-delay: 0

User-agent: Yandex
Allow: /
Crawl-delay: 1

User-agent: Slurp # Yahoo
Allow: /
Crawl-delay: 1

User-agent: DuckDuckBot
Allow: /
Crawl-delay: 0

# === User Agents: AI Search Engines (GEO - Generative Engine Optimization) ===
User-agent: GPTBot # ChatGPT Search / OpenAI
Allow: /
Crawl-delay: 0

User-agent: ChatGPT-User # ChatGPT browsing
Allow: /
Crawl-delay: 0

User-agent: PerplexityBot # Perplexity AI
Allow: /
Crawl-delay: 0

User-agent: Claude-Web # Anthropic Claude
Allow: /
Crawl-delay: 0

User-agent: Anthropic-AI # Anthropic AI
Allow: /
Crawl-delay: 0

User-agent: Applebot # Apple Siri
Allow: /
Crawl-delay: 0

User-agent: Amazonbot # Amazon Alexa
Allow: /
Crawl-delay: 1

User-agent: CCBot # Common Crawl (used by many AI models)
Allow: /
Crawl-delay: 1

User-agent: FacebookBot # Meta AI
Allow: /
Crawl-delay: 1

User-agent: Meta-ExternalAgent # Meta AI agents
Allow: /
Crawl-delay: 1

User-agent: Diffbot # AI content extraction
Allow: /
Crawl-delay: 1

User-agent: cohere-ai # Cohere AI
Allow: /
Crawl-delay: 1

# === Default Rule for All Other Bots ===
User-agent: *
Allow: /

# === Disallow: Admin, API, Auth Routes ===
Disallow: /api/auth/
Disallow: /admin/
Disallow: /api/admin/
Disallow: /api/cart/
Disallow: /api/checkout/
Disallow: /api/webhook/
Disallow: /_next/
Disallow: /studio/

# === Disallow: Search Parameters (Avoid Duplicate Content) ===
Disallow: /*?*page=
Disallow: /*?*sort=
Disallow: /*?*filter=
Disallow: /*&

# === Allow: Important Content ===
Allow: /product/*
Allow: /category/*
Allow: /clicky2-0/*
Allow: /store/*

# === Sitemaps ===
Sitemap: https://notfar.com/sitemap.xml
Sitemap: https://notfar.com/sitemap-products.xml
Sitemap: https://notfar.com/sitemap-categories.xml

# === Host (Optional - for preferred domain) ===
# Host: https://notfar.com