# robots.txt for Clikkle - Enhanced SEO Configuration
# This file tells search engine crawlers which pages they can access

# Main crawler configuration
User-agent: *
# Allow crawling of all pages
Allow: /
Disallow:

# Block specific directories and files that shouldn't be indexed
Disallow: /api/
Disallow: /_next/
Disallow: /static/
Disallow: /private/
Disallow: /admin/
Disallow: /dashboard/
Disallow: /*?*sort=
Disallow: /*?*filter=
Disallow: /*?*session=
Disallow: /*?*token=

# Block specific file types
Disallow: /*.pdf$
Disallow: /*.doc$
Disallow: /*.docx$

# Block search result pages and parameters
Disallow: /*?*search=
Disallow: /*?*query=
Disallow: /*?*keyword=

# Block user-specific pages
Disallow: /*/user/
Disallow: /*/profile/
Disallow: /*/settings/

# Sitemap locations
Sitemap: https://clikkle.com/sitemap.xml

# Crawl delay to be respectful
Crawl-delay: 1

# Specific bot configurations
User-agent: Googlebot
Allow: /
Crawl-delay: 0.5

User-agent: Bingbot
Allow: /
Crawl-delay: 1

User-agent: Slurp
Allow: /
Crawl-delay: 1

User-agent: DuckDuckBot
Allow: /
Crawl-delay: 1

User-agent: Baiduspider
Allow: /
Crawl-delay: 2

User-agent: Yandex
Allow: /
Crawl-delay: 2

# AI and ML bots
User-agent: ChatGPT-User
Allow: /

User-agent: Claude-Web
Allow: /

User-agent: CCBot
Allow: /

# Social media bots
User-agent: facebookexternalhit
Allow: /

User-agent: Twitterbot
Allow: /

User-agent: LinkedInBot
Allow: /

# SEO and analytics bots
User-agent: AhrefsBot
Allow: /
Crawl-delay: 2

User-agent: SemrushBot
Allow: /
Crawl-delay: 2

User-agent: MJ12bot
Allow: /
Crawl-delay: 3

User-agent: DotBot
Allow: /
Crawl-delay: 2