# Robots.txt for kanishka.dev - SEO Optimized
# Updated: 2025-11-06

# Default group: allow all search engines to crawl everything
User-agent: *
Allow: /

# Important directories - explicitly allowed
Allow: /css/
Allow: /js/
Allow: /images/
Allow: /pages/
Allow: /data/

# Disallow unnecessary files (security & performance)
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /generate-sitemap.js

# Major Search Engine Specific Rules
# NOTE: per RFC 9309, a crawler obeys ONLY its most specific matching group,
# so the Disallow rules are repeated in each named-bot group below -
# otherwise these bots would ignore the disallows in the "*" group.

User-agent: Googlebot
Allow: /
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /generate-sitemap.js
# Crawl-delay is ignored by Googlebot; use Search Console crawl settings instead.

User-agent: Googlebot-Image
Allow: /images/

User-agent: Bingbot
Allow: /
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /generate-sitemap.js
Crawl-delay: 1

User-agent: Slurp
Allow: /
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /generate-sitemap.js
Crawl-delay: 1

User-agent: DuckDuckBot
Allow: /
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /generate-sitemap.js

User-agent: Baiduspider
Allow: /
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /generate-sitemap.js
Crawl-delay: 2

User-agent: YandexBot
Allow: /
Disallow: /node_modules/
Disallow: /.git/
Disallow: /.vscode/
Disallow: /generate-sitemap.js
Crawl-delay: 2

# Sitemap location (for search engine discovery)
Sitemap: https://kanishka.dev/sitemap.xml

# Host (deprecated Yandex-only directive; takes a bare hostname, not a URL)
Host: kanishka.dev