# robots.txt for AdConvert
# Updated: 2026-03-31

# Default: Allow all search engine crawlers
User-agent: *
Allow: /

# Disallow private pages (require authentication)
Disallow: /app/
Disallow: /admin/

# Disallow authentication pages (not needed in search results)
Disallow: /signin/
Disallow: /signup/
Disallow: /auth/

# Disallow dynamic share pages (token-based, not indexable)
Disallow: /share/

# Disallow API routes
Disallow: /api/

# Next.js static assets
Allow: /_next/static/media/
Disallow: /_next/
Disallow: /_vercel/

# Crawl delay (optional, prevents server overload; non-standard — Googlebot ignores it)
Crawl-delay: 1

# --- AI Search Engine Crawlers (GEO: Allow for Citations) ---
# NOTE: per RFC 9309, a crawler that matches a specific User-agent group
# ignores the "*" group entirely, so the private-path Disallows must be
# repeated in each group below — otherwise these bots could crawl
# /app/, /admin/, /api/, etc.

# Google AI Overviews + Search
User-agent: Googlebot
Allow: /
Disallow: /app/
Disallow: /admin/
Disallow: /signin/
Disallow: /signup/
Disallow: /auth/
Disallow: /share/
Disallow: /api/
Disallow: /_next/
Disallow: /_vercel/
Allow: /_next/static/media/

# OpenAI ChatGPT (citations + training)
User-agent: GPTBot
Allow: /
Disallow: /app/
Disallow: /admin/
Disallow: /signin/
Disallow: /signup/
Disallow: /auth/
Disallow: /share/
Disallow: /api/
Disallow: /_next/
Disallow: /_vercel/
Allow: /_next/static/media/
Crawl-delay: 2

# Anthropic Claude (citations)
User-agent: ClaudeBot
Allow: /
Disallow: /app/
Disallow: /admin/
Disallow: /signin/
Disallow: /signup/
Disallow: /auth/
Disallow: /share/
Disallow: /api/
Disallow: /_next/
Disallow: /_vercel/
Allow: /_next/static/media/
Crawl-delay: 2

# Perplexity (real-time search + citations)
User-agent: PerplexityBot
Allow: /
Disallow: /app/
Disallow: /admin/
Disallow: /signin/
Disallow: /signup/
Disallow: /auth/
Disallow: /share/
Disallow: /api/
Disallow: /_next/
Disallow: /_vercel/
Allow: /_next/static/media/
Crawl-delay: 2

# --- Training-Only Crawlers (Block: No Citation Benefit) ---

# Google Gemini model training (no search citation benefit)
User-agent: Google-Extended
Disallow: /

# Common Crawl dataset (no direct benefit)
User-agent: CCBot
Disallow: /

# ByteDance/TikTok training (no citation benefit)
User-agent: Bytespider
Disallow: /

# Sitemap location
Sitemap: https://adconvert.org/sitemap.xml